Ecosystem size is a key factor driving biodiversity and ecosystem function. Larger ecosystems contain more species and can be hubs of dispersal and resource flows in networks of multiple ecosystems. However, whether and how ecosystem size and resource flows interact to affect biodiversity and ecosystem function has been largely overlooked. Here, we investigated how ecosystem size asymmetry affects biodiversity and function of two-ecosystem meta-ecosystems connected through flows of non-living resources. We conducted microcosm experiments, mimicking resource flows between ecosystems of different sizes, yet otherwise being identical. We found that meta-ecosystems with asymmetric ecosystem sizes had higher β-diversity but lower α-diversity and ecosystem function (total biomass) than their unconnected counterparts, while such an effect was not found for meta-ecosystems of identical ecosystem sizes. Our work demonstrates how cross-ecosystem dynamics modulated by differences in ecosystem sizes affect biodiversity and function, with a direct implication for conservation and management of connected ecosystems.
# --- SET UP R MARKDOWN AND GENERAL CODING RUNNING PARAMETERS --- #
# Fix the random seed and record when the run started.
set.seed(420)
start_time <- Sys.time()
# Knit silently, without chunk caching.
knitr::opts_chunk$set(message = FALSE,
                      cache = FALSE,
                      autodep = FALSE)
# Toggles: re-run the slow analyses / plot residual diagnostics of the
# meta-ecosystem models.
recompute_lengthy_analyses <- TRUE
plot_model_residuals_metaecos <- FALSE
# --- SET UP PARAMETERS RELATED TO RESOURCE FLOWS --- #
# Two disturbance intensities were crossed with the connection treatments.
disturbance_levels <- c("low", "high")
n_disturbance_levels <- length(disturbance_levels)
# Experimental days on which resources were exchanged between ecosystems.
resource_flow_days <- c(5, 9, 13, 17, 21, 25)
first_resource_flow <- resource_flow_days[1]
# --- SET UP SAMPLING PARAMETERS --- #
total_frames <- 125          # frames recorded per video
volume_recorded_μl <- 34.4   # volume filmed per video (µl)
time_points <- 0:7
time_points_without_t0 <- 1:7
time_point_names <- paste0("t", time_points)
sampling_days <- c(0, 4, 8, 12, 16, 20, 24, 28)
first_time_point <- 0
last_time_point <- length(sampling_days) - 1
n_time_points <- last_time_point + 1
# Videos taken per culture at each time point. At t0 we took 12 videos of
# the large bottle from which the cultures were started.
# TODO: document why 2 videos were taken at the last two time points.
nr_videos <- c(12, 1, 1, 1, 1, 1, 2, 2)
videos_taken <- data.frame(time_point = time_points,
                           nr_videos = nr_videos)
n_videos_taken_t0 <- nr_videos[1]
# Lookup table: time point -> experimental day and number of video replicates.
time_point_day <- data.frame(time_point = first_time_point:last_time_point,
                             day = sampling_days,
                             video_replicates = nr_videos)
# Videos excluded from all analyses, identified by culture, time point and
# file number. Built directly instead of the former NA-seeded data.frame +
# add_row() + slice(-1) workaround (same columns, same single row).
# Culture 27 (= file 137 minus the 110 files of the first replicate) had a
# problematic video at t7.
videos_to_take_off <- data.frame(culture_ID = 137 - 110,
                                 time_point = 7,
                                 file = 137)
# Number of experimental cultures, and the total number of video rows
# expected over the whole experiment (videos per time point x cultures).
n_cultures <- 110
total_number_of_video_rows <- sum(nr_videos * n_cultures)
# --- SET UP PROTIST PARAMETERS --- #
# Short codes of the 11 protist species in the community.
protist_species <- c("Ble", "Cep", "Col", "Eug", "Eup", "Lox",
                     "Pau", "Pca", "Spi", "Spi_te", "Tet")
# Derived column-name vectors used throughout the pipelines.
protist_species_indiv_per_volume <- paste0(protist_species, "_indiv_per_volume")
protist_species_indiv_per_ml <- paste0(protist_species, "_indiv_per_ml")
protist_species_dominance <- paste0(protist_species_indiv_per_ml, "_dominance")
protist_species_total <- paste0(protist_species, "_tot_indiv")
n_protist_species <- length(protist_species)
first_protist <- protist_species[1]
last_protist <- protist_species[n_protist_species]
# Species were identified with different pixel-size thresholds:
# 13 pixels for the smaller species, 40 pixels for Ble, Cep and Spi.
species_IDD_with_13_threshold <- c("Col", "Eug", "Eup", "Lox",
                                   "Pau", "Pca", "Spi_te", "Tet")
species_IDD_with_13_threshold_indiv_per_volume <- paste0(species_IDD_with_13_threshold,
                                                         "_indiv_per_volume")
species_IDD_with_40_threshold <- c("Ble", "Cep", "Spi")
species_IDD_with_40_threshold_indiv_per_volume <- paste0(species_IDD_with_40_threshold,
                                                         "_indiv_per_volume")
# --- SET UP ECOSYSTEM PARAMETERS --- #
# Culture 60 is excluded: it was spilled (small unconnected, high
# disturbance, system nr 40).
ecosystems_to_take_off <- 60
ecosystems_info <- read.csv(here("1_data", "ecosystems_info.csv"),
                            header = TRUE)
# Identifier and treatment columns of the ecosystem-level datasets.
columns_ecosystems <- c("time_point",
                        "day",
                        "culture_ID",
                        "system_nr",
                        "disturbance",
                        "ecosystem_type",
                        "connection",
                        "ecosystem_size",
                        "ecosystem_size_ml",
                        "metaecosystem",
                        "metaecosystem_type")
# Treatment columns only: drop the identifiers of single units.
columns_treatments <- setdiff(columns_ecosystems, c("system_nr", "culture_ID"))
# Response variables computed for every ecosystem, including per-species
# densities, whole-ecosystem totals and dominances (the three name vectors
# were already derived from protist_species above).
variables_ecosystems <- c("bioarea_mm2_per_ml",
                          "bioarea_tot_mm2",
                          "indiv_per_ml",
                          "indiv_tot",
                          "species_richness",
                          "shannon",
                          "simpson",
                          "inv_simpson",
                          "evenness_pielou",
                          "median_body_area_µm2",
                          protist_species_indiv_per_ml,
                          protist_species_total,
                          protist_species_dominance)
# Columns holding the baseline value of each response variable.
baseline_columns <- paste0("baseline_", variables_ecosystems)
# Display order of the ecosystem types in figures.
ecosystem_types_ordered <- c("Small connected to large",
                             "Small connected to small",
                             "Small unconnected",
                             "Medium connected to medium",
                             "Medium unconnected",
                             "Large connected to small",
                             "Large connected to large",
                             "Large unconnected")
# Each connected treatment is compared against the unconnected control of
# the same ecosystem size.
treatments_and_controls <- data.frame(
  treatment = c("Small connected to small",
                "Small connected to large",
                "Medium connected to medium",
                "Large connected to large",
                "Large connected to small"),
  control = c("Small unconnected",
              "Small unconnected",
              "Medium unconnected",
              "Large unconnected",
              "Large unconnected"))
n_treatments <- length(unique(treatments_and_controls$treatment))
n_controls <- length(unique(treatments_and_controls$control))
n_replicates <- 5
n_ecosystem_types <- 8
# --- SET UP SIZE CLASSES PARAMETERS --- #
# Individuals are binned into 12 body-size classes.
n_size_classes <- 12
columns_classes <- c(columns_ecosystems,
                     "size_class_n",
                     "mean_class_area_µm2")
# --- SET UP META-ECOSYSTEM PARAMETERS --- #
# Meta-ecosystems containing an excluded culture are dropped as well.
metaecosystems_to_take_off <- ecosystems_info %>%
  filter(culture_ID %in% ecosystems_to_take_off) %>%
  pull(system_nr) %>%
  unique()
# System numbers of all meta-ecosystems.
system_nr_metaecosystems <- ecosystems_info %>%
  filter(metaecosystem == "yes") %>%
  pull(system_nr) %>%
  unique()
n_metaecosystems <- length(system_nr_metaecosystems)
# Response variables computed at the meta-ecosystem level.
variables_metaecos <- c("total_metaecosystem_bioarea_mm2",
                        "jaccard_index",
                        "bray_curtis",
                        "beta_spatial_turnover",
                        "beta_nestedness",
                        "beta_total",
                        "metaecosystem_richness")
# Display order of the meta-ecosystem types in figures.
metaecosystem_types_ordered <- c("Small-Small meta-ecosystem",
                                 "Medium-Medium meta-ecosystem",
                                 "Medium-Medium unconnected",
                                 "Large-Large meta-ecosystem",
                                 "Small-Large meta-ecosystem",
                                 "Small-Large unconnected")
# Meta-ecosystem types analysed in the paper.
metaecosystem_type_selected <- c("Medium-Medium",
                                 "Small-Large")
Axis labels for each response variable.
# --- SET UP AXES LABELS PARAMETERS --- #
# Lookup table mapping each variable to its axis label ("ES" = effect size,
# Hedge's d). The table is seeded with the first real row, which makes the
# former NA placeholder row and the closing slice(-1) unnecessary.
# Fixed here: "Bioamass" -> "Biomass"; mean_richness wrongly labelled
# "(Shannon)"; the scrambled key "Sp_indiv_per_mli_dominance_d"; and the
# spelling of the photosynthesiser/heterotroph labels.
axis_names = data.frame(variable = "day",
                        axis_name = "Time (day)") %>%
  add_row(variable = "ecosystem_size_ml", axis_name = "Patch size (ml)") %>%
  add_row(variable = "log_size_class", axis_name = "Log size (μm2)") %>%
  add_row(variable = "class_indiv_per_µl", axis_name = "Density (ind/ml)") %>%
  add_row(variable = "bioarea_mm2_per_ml", axis_name = "Biomass (mm2/ml)") %>%
  add_row(variable = "sqrt_bioarea_mm2_per_ml", axis_name = "Sqrt Biomass (mm2/ml)") %>%
  add_row(variable = "bioarea_mm2_per_ml_d", axis_name = "Biomass ES") %>%
  # NOTE(review): the key below is "bioarea_tot" although the dataset column
  # is "bioarea_tot_mm2" -- confirm which name the plotting code looks up.
  add_row(variable = "bioarea_tot", axis_name = "Total Biomass (mm2)") %>%
  add_row(variable = "total_metaecosystem_bioarea_mm2", axis_name = "Total Biomass (mm2)") %>%
  add_row(variable = "species_richness", axis_name = "Species Richness") %>%
  add_row(variable = "species_richness_d", axis_name = "Species Richness ES") %>%
  add_row(variable = "mean_richness", axis_name = "Mean α-Diversity (Richness)") %>%
  add_row(variable = "mean_shannon", axis_name = "Mean α-Diversity (Shannon)") %>%
  add_row(variable = "shannon", axis_name = "Biodiversity (Shannon)") %>%
  add_row(variable = "sqrt_shannon", axis_name = "Sqrt Biodiversity (Shannon)") %>%
  add_row(variable = "shannon_d", axis_name = "Biodiversity ES (Shannon ES)") %>%
  add_row(variable = "bray_curtis", axis_name = "β-Diversity (Bray-Curtis)") %>%
  add_row(variable = "beta_spatial_turnover", axis_name = "Turn over (Simpson pair-wise dissimilarity)") %>%
  add_row(variable = "beta_nestedness", axis_name = "Nestedness (nestedness-fraction of Sorensen)") %>%
  add_row(variable = "beta_total", axis_name = "Tot β-Diversity (Sorensen)") %>%
  add_row(variable = "metaecosystem_richness", axis_name = "γ-Diversity (Species Richness)") %>%
  add_row(variable = "indiv_per_ml", axis_name = "Abundance (ind/ml)") %>%
  add_row(variable = "indiv_per_ml_d", axis_name = "Abundance ES") %>%
  add_row(variable = "median_body_area_µm2", axis_name = "Median Body Size (µm²)") %>%
  add_row(variable = "median_body_area_µm2_d", axis_name = "Median Body Size ES") %>%
  add_row(variable = "Ble_indiv_per_ml", axis_name = "Ble Density (ind/ml)") %>%
  add_row(variable = "Cep_indiv_per_ml", axis_name = "Cep Density (ind/ml)") %>%
  add_row(variable = "Col_indiv_per_ml", axis_name = "Col Density (ind/ml)") %>%
  add_row(variable = "Eug_indiv_per_ml", axis_name = "Eug Density (ind/ml)") %>%
  add_row(variable = "Eup_indiv_per_ml", axis_name = "Eup Density (ind/ml)") %>%
  add_row(variable = "Lox_indiv_per_ml", axis_name = "Lox Density (ind/ml)") %>%
  add_row(variable = "Pau_indiv_per_ml", axis_name = "Pau Density (ind/ml)") %>%
  add_row(variable = "Pca_indiv_per_ml", axis_name = "Pca Density (ind/ml)") %>%
  add_row(variable = "Spi_indiv_per_ml", axis_name = "Spi Density (ind/ml)") %>%
  add_row(variable = "Spi_te_indiv_per_ml", axis_name = "Spi te Density (ind/ml)") %>%
  add_row(variable = "Tet_indiv_per_ml", axis_name = "Tet Density (ind/ml)") %>%
  add_row(variable = "auto_hetero_ratio", axis_name = "Photosynthesisers-Heterotrophs Ratio") %>%
  add_row(variable = "sqrt_auto_hetero_ratio", axis_name = "Sqrt Photosynthesisers-Heterotrophs Ratio") %>%
  add_row(variable = "Ble_indiv_per_ml_d", axis_name = "Ble Density ES") %>%
  add_row(variable = "Cep_indiv_per_ml_d", axis_name = "Cep Density ES") %>%
  add_row(variable = "Col_indiv_per_ml_d", axis_name = "Col Density ES") %>%
  add_row(variable = "Eug_indiv_per_ml_d", axis_name = "Eug Density ES") %>%
  add_row(variable = "Eup_indiv_per_ml_d", axis_name = "Eup Density ES") %>%
  add_row(variable = "Lox_indiv_per_ml_d", axis_name = "Lox Density ES") %>%
  add_row(variable = "Pau_indiv_per_ml_d", axis_name = "Pau Density ES") %>%
  add_row(variable = "Pca_indiv_per_ml_d", axis_name = "Pca Density ES") %>%
  add_row(variable = "Spi_indiv_per_ml_d", axis_name = "Spi Density ES") %>%
  add_row(variable = "Spi_te_indiv_per_ml_d", axis_name = "Spi te Density ES") %>%
  add_row(variable = "Tet_indiv_per_ml_d", axis_name = "Tet Density ES") %>%
  add_row(variable = "Ble_indiv_per_ml_dominance", axis_name = "Ble Dominance (%)") %>%
  add_row(variable = "Cep_indiv_per_ml_dominance", axis_name = "Cep Dominance (%)") %>%
  add_row(variable = "Col_indiv_per_ml_dominance", axis_name = "Col Dominance (%)") %>%
  add_row(variable = "Eug_indiv_per_ml_dominance", axis_name = "Eug Dominance (%)") %>%
  add_row(variable = "Eup_indiv_per_ml_dominance", axis_name = "Eup Dominance (%)") %>%
  add_row(variable = "Lox_indiv_per_ml_dominance", axis_name = "Lox Dominance (%)") %>%
  add_row(variable = "Pau_indiv_per_ml_dominance", axis_name = "Pau Dominance (%)") %>%
  add_row(variable = "Pca_indiv_per_ml_dominance", axis_name = "Pca Dominance (%)") %>%
  add_row(variable = "Spi_indiv_per_ml_dominance", axis_name = "Spi Dominance (%)") %>%
  add_row(variable = "Spi_te_indiv_per_ml_dominance", axis_name = "Spi te Dominance (%)") %>%
  add_row(variable = "Tet_indiv_per_ml_dominance", axis_name = "Tet Dominance (%)") %>%
  add_row(variable = "Ble_indiv_per_ml_dominance_d", axis_name = "Ble Dominance ES") %>%
  add_row(variable = "Cep_indiv_per_ml_dominance_d", axis_name = "Cep Dominance ES") %>%
  add_row(variable = "Col_indiv_per_ml_dominance_d", axis_name = "Col Dominance ES") %>%
  add_row(variable = "Eug_indiv_per_ml_dominance_d", axis_name = "Eug Dominance ES") %>%
  add_row(variable = "Eup_indiv_per_ml_dominance_d", axis_name = "Eup Dominance ES") %>%
  add_row(variable = "Lox_indiv_per_ml_dominance_d", axis_name = "Lox Dominance ES") %>%
  add_row(variable = "Pau_indiv_per_ml_dominance_d", axis_name = "Pau Dominance ES") %>%
  add_row(variable = "Pca_indiv_per_ml_dominance_d", axis_name = "Pca Dominance ES") %>%
  add_row(variable = "Spi_indiv_per_ml_dominance_d", axis_name = "Spi Dominance ES") %>%
  add_row(variable = "Spi_te_indiv_per_ml_dominance_d", axis_name = "Spi te Dominance ES") %>%
  add_row(variable = "Tet_indiv_per_ml_dominance_d", axis_name = "Tet Dominance ES") %>%
  add_row(variable = "dominance", axis_name = "Dominance (%)") %>%
  add_row(variable = "log_abundance", axis_name = "Log Abundance + 1 (ind/mm²)") %>%
  add_row(variable = "abundance_hedges_d", axis_name = "Density ES") %>%
  add_row(variable = "beta_diversity_from_unconnected", axis_name = "Divergence from unconnected") %>%
  add_row(variable = "beta_diversity_from_previous_time", axis_name = "Temporal Divergence") %>%
  add_row(variable = "beta_diversity_from_previous_time_d", axis_name = "Temporal Divergence ES") %>%
  add_row(variable = "evenness_pielou", axis_name = "Evenness") %>%
  add_row(variable = "evenness_pielou_d", axis_name = "Evenness ES")
# --- SET UP COLOUR AND LINE TYPE PER ECOSYSTEM/META-ECOSYSTEM PARAMETERS --- #
# One colour per ecosystem size, meta-ecosystem type and symmetry level.
treatment_colours <- c("Small"         = "#feb24c",
                       "Medium"        = "#1b7837",
                       "Large"         = "#3182bd",
                       "Small-Small"   = "#fc9272",
                       "Large-Large"   = "#67000d",
                       "Small-Large"   = "#762a83",
                       "Medium-Medium" = "#1b7837",
                       "symmetric"     = "#1b7837",
                       "asymmetric"    = "#762a83")
# Reduced palette used in the paper figures.
treatment_colours_paper <- c("symmetric"  = "#1b7837",
                             "asymmetric" = "#762a83")
treatment_linetype_paper <- c("connected"   = "solid",
                              "unconnected" = "dashed")
# Line type per connection treatment.
treatment_linetype <- c("connected to small"  = "solid",
                        "connected to medium" = "dashed",
                        "connected to large"  = "longdash",
                        "connected"           = "solid",
                        "unconnected"         = "dotted")
# --- SET UP PLOTTING PARAMETERS --- #
# Figure size in the knitted document
figures_height_rmd_output <- 7
# Legend
legend_position <- "top"
legend_width_cm <- 2
size_legend <- 12
# Axis text
size_x_axis <- 13
size_y_axis <- size_x_axis
# Boxplots, dodging and error bars
boxplot_width <- 2
dodging <- 0.5
width_errorbar <- 0.2
dodging_error_bar <- 0.5
# Treatment lines and points
treatment_lines_linewidth <- 1
treatment_points_size <- 2.5
# Vertical lines marking resource-flow days
resource_flow_line_type <- "solid"
resource_flow_line_colour <- "#d9d9d9"
resource_flow_line_width <- 0.3
# Horizontal zero reference lines (plain and effect-size panels)
zero_line_colour <- "grey"
zero_line_line_type <- "dotted"
zero_line_line_width <- 0.5
zero_line_ES_line_type <- "dotted"
zero_line_ES_colour <- "grey"
zero_line_ES_line_width <- 1
# Margins used when arranging panels with ggarrange
ggarrange_margin_top <- 0
ggarrange_margin_bottom <- 0
ggarrange_margin_left <- 0
ggarrange_margin_right <- 0
# Figure output settings for the paper
paper_width <- 17.3
paper_height <- 20
paper_units <- "cm"
paper_res <- 600
paper_y_axis_size <- 9
paper_labels_size <- 9
# Figure output settings for presentations
presentation_figure_size <- 15
presentation_figure_width <- 30
presentation_figure_height <- 22
presentation_legend_size <- 20
presentation_x_axis_size <- 22
presentation_y_axis_size <- presentation_x_axis_size
presentation_axes_size <- 12
presentation_treatment_points_size <- 5
presentation_treatment_linewidth <- 2
presentation_figure_units <- "cm"
presentation_figure_res <- 600
# Grey background rectangle drawn behind part of the time axis
grey_background_xmin <- -Inf
grey_background_xmax <- 7.5
grey_background_ymin <- -Inf
grey_background_ymax <- Inf
grey_background_fill <- "#f0f0f0"
grey_background_alpha <- 0.03
grey_background_color <- "transparent"
# --- SET UP MODELLING PARAMETERS --- #
# t1 provides the baseline values for the models.
time_point_of_baselines <- 1
# Time points from which water was added to compensate evaporation.
# NOTE: "addtion" is a historical typo in the name; kept as-is because
# renaming could break code elsewhere that references it.
time_points_with_water_addtion <- 3:7
# Time points entering the statistical models.
time_points_model <- 2:7
# --- IMPORT ECOSYSTEM INFORMATION --- #
# Re-read the per-culture metadata so this chunk can run standalone (the
# same file is also read in the set-up chunk above).
ecosystems_info = read.csv(here("1_data", "ecosystems_info.csv"), header = TRUE)
In this dataset (ds_individuals) each row represents an
individual at a time point.
# --- IMPORT T0 --- #
# Import the individual data of t0. We considered cultures to be all the same at the beginning (t0). Because of this reason, we filmed only the bottles from which cultures were assembled. Because we want to plot also t0 for the different treatments, we want to assign the video of bottles to all cultures at t0.
# Each row of the raw CSV is one tracked individual in one t0 video.
ds_individuals_t0_not_elongated = read.csv(here("1_data",
                                                "individuals_13_threshold",
                                                "t0.csv")) %>%
  # Parse the "tX" label and file name into plain numbers; at t0 each file
  # is one video replicate of the source bottle.
  mutate(time_point = as.numeric(str_extract(time_point, "\\d+")),
         day = 0,
         file = as.numeric(str_extract(file, "\\d+")),
         video_replicate = file) %>%
  select(time_point,
         day,
         video_replicate,
         file,
         id,        # individual identifier
         N_frames,  # number of frames the individual was tracked in
         mean_area) # mean body area of the individual
# Elongate t0
# All cultures started identical, so the t0 individuals of the source
# bottle are copied once per culture: stack nrow(ecosystems_info) copies of
# the t0 data, then assign culture IDs 1..n within each individual.
# BUG FIX: the data frame used to be piped into map_dfr() while .x and .f
# were both passed by name, so the piped value was silently swallowed by
# `...` and the call only worked by accident. map_dfr() is now called
# directly.
ds_individuals_t0_elongated = map_dfr(seq_len(nrow(ecosystems_info)),
                                      ~ ds_individuals_t0_not_elongated) %>%
  arrange(id) %>% #Id refers to an individual
  # arrange() is stable, so within each id the copies keep their stacking
  # order and cycle through the culture IDs exactly once.
  mutate(culture_ID = rep(seq_len(nrow(ecosystems_info)),
                          times = nrow(ds_individuals_t0_not_elongated))) %>%
  select(time_point,
         day,
         video_replicate,
         file,
         culture_ID,
         id,
         N_frames,
         mean_area)
expect_equal(nrow(ds_individuals_t0_not_elongated) * nrow(ecosystems_info),
             nrow(ds_individuals_t0_elongated))
# --- IMPORT ALL TIME POINTS BUT T0 --- #
# Pre-allocate one list slot per time point
ds_individuals_t1_to_t7 = vector("list", length(time_points_without_t0))
# Import all time points but t0
for (time_point_i in time_points_without_t0) {
  ds_individuals_t1_to_t7[[time_point_i]] =
    read.csv(here("1_data",
                  "individuals_13_threshold",
                  paste0("t", time_point_i, ".csv"))) %>%
    mutate(time_point = as.numeric(str_extract(time_point, "\\d+")),
           day = time_point_day$day[time_point_day$time_point == time_point_i],
           file = as.numeric(str_extract(file, "\\d+")),
           # Files 1-110 belong to video replicate 1, files 111-220 to
           # replicate 2.
           video_replicate = ceiling(file / n_cultures))
}
# Tidy up all time points but t0: stack the per-time-point data frames and
# keep the identifier and measurement columns in a consistent order.
ds_individuals_t1_to_t7 = ds_individuals_t1_to_t7 %>%
  bind_rows() %>%
  select(time_point,
         day,
         video_replicate,
         file,
         culture_ID,
         id,
         N_frames,
         mean_area)
# --- BIND T0 WITH OTHER TIME POINTS --- #
# Stack t0 with t1-t7 and attach the treatment metadata of each culture.
ds_individuals = rbind(ds_individuals_t0_elongated,
                       ds_individuals_t1_to_t7) %>%
  left_join(ecosystems_info,
            by = "culture_ID")
# --- RENAME AND SELECT COLUMNS --- #
# Harmonise the column names ("patch" -> "ecosystem") and keep only the
# columns used downstream.
ds_individuals = ds_individuals %>%
  rename(ecosystem_size = patch_size,
         ecosystem_size_volume = patch_size_volume) %>%
  select(disturbance,
         disturbance_volume,
         time_point,
         day,
         video_replicate,
         culture_ID,
         system_nr,
         file,
         eco_metaeco_type,
         ecosystem_size,
         ecosystem_size_volume,
         metaecosystem,
         metaecosystem_type,
         mean_area,
         N_frames) %>%
  rename(ecosystem_size_ml = ecosystem_size_volume,
         ecosystem_type = eco_metaeco_type,
         body_area_µm2 = mean_area)
# --- RENAME AND REORDER LEVELS --- #
# Recode the coded treatment labels (e.g. "S (S_L)") into human-readable
# ecosystem types, derive the ecosystem size and the size of the connected
# partner ecosystem, and set the factor level order used in figures.
ds_individuals <- ds_individuals %>%
  mutate(ecosystem_type = case_when(ecosystem_type == "S" ~ "Small unconnected",
                                    ecosystem_type == "M" ~ "Medium unconnected",
                                    ecosystem_type == "L" ~ "Large unconnected",
                                    ecosystem_type == "S (S_S)" ~ "Small connected to small",
                                    ecosystem_type == "S (S_L)" ~ "Small connected to large",
                                    ecosystem_type == "M (M_M)" ~ "Medium connected to medium",
                                    ecosystem_type == "L (S_L)" ~ "Large connected to small",
                                    ecosystem_type == "L (L_L)" ~ "Large connected to large",
                                    TRUE ~ ecosystem_type),
         ecosystem_type = factor(ecosystem_type,
                                 levels = ecosystem_types_ordered),
         # BUG FIX: the fallback used to be `TRUE ~ ecosystem_type`, writing
         # the (already recoded) ecosystem type into the size column for any
         # unmatched code.
         ecosystem_size = case_when(ecosystem_size == "S" ~ "Small",
                                    ecosystem_size == "M" ~ "Medium",
                                    ecosystem_size == "L" ~ "Large",
                                    TRUE ~ ecosystem_size),
         # BUG FIX: the levels must be passed as one vector. Previously
         # `levels = "Small", "Medium", "Large"` let "Medium" and "Large"
         # fall positionally into factor()'s `labels` and `exclude`
         # arguments, relabelling "Small" as "Medium" and turning the other
         # sizes into NA.
         ecosystem_size = factor(ecosystem_size,
                                 levels = c("Small",
                                            "Medium",
                                            "Large")),
         # Size of the partner ecosystem; NA for unconnected ecosystems.
         size_connected_ecosystem = case_when(ecosystem_type == "Small connected to small" ~ "Small",
                                              ecosystem_type == "Small connected to large" ~ "Large",
                                              ecosystem_type == "Medium connected to medium" ~ "Medium",
                                              ecosystem_type == "Large connected to large" ~ "Large",
                                              ecosystem_type == "Large connected to small" ~ "Small",
                                              TRUE ~ NA_character_))
# --- TAKE OFF PROBLEMATIC VIDEOS --- #
# Set up parameters for testing
ds_individuals_before_taking_off_videos = ds_individuals
# Take off problematic videos.
# BUG FIX: the previous filter used `time_point %in% ... & file %in% ...`,
# which removes every combination of the listed time points and files, not
# only the listed (time_point, file) pairs. anti_join() drops exactly the
# pairs recorded in videos_to_take_off (identical result while that table
# has a single row, and correct if more videos are ever added).
ds_individuals = ds_individuals %>%
  anti_join(videos_to_take_off,
            by = c("time_point", "file"))
# Test code: the removed rows span exactly as many (culture, time point,
# file) combinations as there are excluded videos.
diff = setdiff(ds_individuals_before_taking_off_videos, ds_individuals)
expect_equal(nrow(videos_to_take_off),
             nrow(expand.grid(diff$culture_ID, diff$time_point, diff$file) %>% unique()))
# --- TAKE OFF PROBLEMATIC ECOSYSTEMS --- #
# Set up parameters for testing
ds_individuals_before_taking_off_cultures = ds_individuals
# Take off problematic ecosystems (culture 60 was spilled).
ds_individuals = ds_individuals %>%
  filter(!culture_ID %in% ecosystems_to_take_off)
# Test code: the removed rows must all belong to the excluded cultures.
expect_equal(setdiff(ds_individuals_before_taking_off_cultures,
                     ds_individuals) %>%
               pull(culture_ID) %>%
               unique(),
             ecosystems_to_take_off)
In this dataset (ds_ecosystems) each row represents an
ecosystem at a time point. I use the data from the 40 threshold analysis
for Ble, Cep, Spi and the data from the 13 threshold analysis for all
the other protists (Col, Eug, Eup, Lox, Pau, Pca, Spi te, Tet).
# --- IMPORT & BIND T0 DATASETS --- #
# Import t0: community-level data (13-pixel threshold), one row per video.
ds_ecosystems_t0 = read.csv(here("1_data",
                                 "ecosystems_13_threshold",
                                 "t0.csv")) %>%
  # Parse the "tX" label; at t0 each file is one video replicate of the
  # source bottle.
  mutate(time_point = as.numeric(str_extract(time_point, "\\d+")),
         day = 0,
         video_replicate = file) %>%
  select(time_point,
         day,
         video_replicate,
         file,
         bioarea_per_volume,
         indiv_per_volume)
# Species counts identified with the 13-pixel threshold at t0. The raw CSV
# has one column per species (e.g. "Ble"); suffix them all with
# "_indiv_per_volume" via rename_with() (replacing an 11-line rename
# chain), then keep only the species scored with this threshold.
species_ID_13_threshold_t0 = read.csv(here("1_data",
                                           "species_ID_13_threshold",
                                           "t0.csv")) %>%
  rename_with(~ paste0(.x, "_indiv_per_volume"),
              all_of(protist_species)) %>%
  select(file,
         all_of(species_IDD_with_13_threshold_indiv_per_volume))
# Species counts identified with the 40-pixel threshold at t0 (used for
# Ble, Cep and Spi). Same suffixing via rename_with() as above.
species_ID_40_threshold_t0 = read.csv(here("1_data",
                                           "species_ID_40_threshold",
                                           "t0.csv")) %>%
  rename_with(~ paste0(.x, "_indiv_per_volume"),
              all_of(protist_species)) %>%
  select(file,
         all_of(species_IDD_with_40_threshold_indiv_per_volume))
# Join the species identifications of both thresholds onto the t0
# community-level data, then turn the file name into a plain number.
ds_ecosystems_t0 = ds_ecosystems_t0 %>%
  left_join(species_ID_13_threshold_t0,
            by = "file") %>%
  left_join(species_ID_40_threshold_t0,
            by = "file") %>%
  mutate(file = as.numeric(str_extract(file, "\\d+")))
# --- ELONGATE T0 DATASET --- #
# Set up parameters
ds_ecosystems_t0_elongated <- list()
# All cultures started identical, so each of the 12 t0 videos of the source
# bottle is assigned to every culture.
for (video_i in 1 : n_videos_taken_t0) {
  # Measurements of this single t0 video
  single_video = ds_ecosystems_t0 %>%
    filter(file == video_i)
  # One copy of the video's measurements per culture (one row per culture
  # in ecosystems_info)
  ds_ecosystems_t0_elongated[[video_i]] = ecosystems_info %>%
    mutate(time_point = 0,
           day = 0,
           file = single_video$file,
           video_replicate = single_video$video_replicate,
           bioarea_per_volume = single_video$bioarea_per_volume,
           indiv_per_volume = single_video$indiv_per_volume,
           Ble_indiv_per_volume = single_video$Ble_indiv_per_volume,
           Cep_indiv_per_volume = single_video$Cep_indiv_per_volume,
           Col_indiv_per_volume = single_video$Col_indiv_per_volume,
           Eug_indiv_per_volume = single_video$Eug_indiv_per_volume,
           Eup_indiv_per_volume = single_video$Eup_indiv_per_volume,
           Lox_indiv_per_volume = single_video$Lox_indiv_per_volume,
           Pau_indiv_per_volume = single_video$Pau_indiv_per_volume,
           Pca_indiv_per_volume = single_video$Pca_indiv_per_volume,
           Spi_indiv_per_volume = single_video$Spi_indiv_per_volume,
           Spi_te_indiv_per_volume = single_video$Spi_te_indiv_per_volume,
           Tet_indiv_per_volume = single_video$Tet_indiv_per_volume)
}
# Stack the per-video copies into one data frame
ds_ecosystems_t0_elongated = ds_ecosystems_t0_elongated %>%
  bind_rows()
# --- CLEAN THE COLUMNS OF T0 --- #
# Keep the identifiers plus the community- and species-level densities.
ds_ecosystems_t0 = ds_ecosystems_t0_elongated %>%
  select(file,
         time_point,
         day,
         culture_ID,
         video_replicate,
         bioarea_per_volume,
         indiv_per_volume,
         all_of(protist_species_indiv_per_volume))
# Each of the 12 t0 videos was assigned to each of the 110 cultures.
expect_equal(nrow(ds_ecosystems_t0),
             sum(n_videos_taken_t0 * n_cultures))
# --- IMPORT AND BIND ALL TIME POINTS BUT T0 --- #
# For each time point t1..t7: read the community-level data (13-pixel
# threshold) together with the species-identification tables of both
# thresholds, and join them by video file. The two duplicated 11-line
# rename chains are replaced by rename_with(), the duplicated `file` entry
# in select() is dropped, and the result list is pre-allocated.
ds_ecosystems_t1_to_t7 = vector("list", length(time_points_without_t0))
for (time_point_i in time_points_without_t0) {
  # Species identified with the 13-pixel threshold
  species_ID_13_threshold = read.csv(here("1_data",
                                          "species_ID_13_threshold",
                                          paste0("t", time_point_i, ".csv"))) %>%
    rename_with(~ paste0(.x, "_indiv_per_volume"),
                all_of(protist_species)) %>%
    select(file,
           all_of(species_IDD_with_13_threshold_indiv_per_volume))
  # Species identified with the 40-pixel threshold (Ble, Cep, Spi)
  species_ID_40_threshold = read.csv(here("1_data",
                                          "species_ID_40_threshold",
                                          paste0("t", time_point_i, ".csv"))) %>%
    rename_with(~ paste0(.x, "_indiv_per_volume"),
                all_of(protist_species)) %>%
    select(file,
           all_of(species_IDD_with_40_threshold_indiv_per_volume))
  # Community-level data. The video replicate is derived from the file
  # order: files come in runs of n_cultures, one run per replicate.
  ds_ecosystems_t1_to_t7[[time_point_i]] = read.csv(here("1_data",
                                                         "ecosystems_13_threshold",
                                                         paste0("t", time_point_i, ".csv"))) %>%
    arrange(file) %>%
    mutate(video_replicate = rep(1 : time_point_day$video_replicates[time_point_i + 1],
                                 each = n_cultures),
           day = time_point_day$day[time_point_day$time_point == time_point_i]) %>%
    select(file,
           time_point,
           day,
           video_replicate,
           culture_ID,
           bioarea_per_volume,
           indiv_per_volume) %>%
    left_join(species_ID_13_threshold,
              by = "file") %>%
    left_join(species_ID_40_threshold,
              by = "file")
}
# Stack the per-time-point data frames
ds_ecosystems_t1_to_t7 = ds_ecosystems_t1_to_t7 %>%
  bind_rows()
# --- BIND T0 WITH ALL OTHER TIME POINTS --- #
# Stack t0 with t1-t7 and attach the treatment metadata of each culture.
ds_ecosystems = rbind(ds_ecosystems_t0,
                      ds_ecosystems_t1_to_t7) %>%
  left_join(ecosystems_info,
            by = "culture_ID")
# One row per video per culture across all time points is expected.
expect_equal(nrow(ds_ecosystems),
             sum(sum(time_point_day$video_replicates) * n_cultures))
# --- REORDER AND RENAME COLUMNS --- #
# Harmonise names ("patch" -> "ecosystem") and keep only the columns used
# downstream.
ds_ecosystems = ds_ecosystems %>%
  rename(ecosystem_size = patch_size,
         ecosystem_size_ml = patch_size_volume) %>%
  select(file,
         time_point,
         day,
         disturbance,
         culture_ID,
         system_nr,
         eco_metaeco_type,
         ecosystem_size,
         ecosystem_size_ml,
         metaecosystem,
         metaecosystem_type,
         video_replicate,
         bioarea_per_volume,
         indiv_per_volume,
         all_of(protist_species_indiv_per_volume)) %>%
  # NOTE(review): the names below mix the micro sign "µ" and the Greek
  # letter "μ" (visually identical, different characters). Later code must
  # reference these columns with exactly the same characters.
  rename(bioarea_µm2_per_μL = bioarea_per_volume) %>%
  rename_all( ~ gsub("volume", "μL", .))
# --- RENAME AND REORDER LEVELS --- #
# Recode the coded treatment labels into human-readable levels and derive
# the connection and ecosystem type.
ds_ecosystems <- ds_ecosystems %>%
  mutate(ecosystem_size = case_when(ecosystem_size == "S" ~ "Small",
                                    ecosystem_size == "M" ~ "Medium",
                                    ecosystem_size == "L" ~ "Large",
                                    TRUE ~ ecosystem_size),
         # No fallback branch: any unmatched eco_metaeco_type code becomes
         # NA here.
         connection = case_when(eco_metaeco_type == "S" ~ "unconnected",
                                eco_metaeco_type == "M" ~ "unconnected",
                                eco_metaeco_type == "L" ~ "unconnected",
                                eco_metaeco_type == "S (S_S)" ~ "connected to small",
                                eco_metaeco_type == "S (S_L)" ~ "connected to large",
                                eco_metaeco_type == "M (M_M)" ~ "connected to medium",
                                eco_metaeco_type == "L (S_L)" ~ "connected to small",
                                eco_metaeco_type == "L (L_L)" ~ "connected to large"),
         # e.g. "Small connected to large"
         ecosystem_type = paste(ecosystem_size, connection),
         metaecosystem_type = case_when(metaecosystem_type == "S_S" ~ "Small-Small",
                                        metaecosystem_type == "M_M" ~ "Medium-Medium",
                                        metaecosystem_type == "L_L" ~ "Large-Large",
                                        metaecosystem_type == "S_L" ~ "Small-Large",
                                        TRUE ~ metaecosystem_type),
         # Parse the "tX" label and the file name into plain numbers
         time_point = as.numeric(str_extract(time_point, "\\d+")),
         file = as.numeric(str_extract(file, "\\d+")))
# --- CHANGE UNITS OF MEASUREMENTS TO ML --- #
# Densities were recorded per µl; multiply by 10^3 to express them per ml,
# and additionally convert the bioarea from µm² to mm² (factor 10^-6).
ds_ecosystems = ds_ecosystems %>%
  mutate(bioarea_µm2_per_ml = bioarea_µm2_per_μL * 10^3,
         bioarea_mm2_per_ml = bioarea_µm2_per_ml * 10^(-6),
         Ble_indiv_per_ml = Ble_indiv_per_μL * 10^3,
         Cep_indiv_per_ml = Cep_indiv_per_μL * 10^3,
         Col_indiv_per_ml = Col_indiv_per_μL * 10^3,
         Eug_indiv_per_ml = Eug_indiv_per_μL * 10^3,
         Eup_indiv_per_ml = Eup_indiv_per_μL * 10^3,
         Lox_indiv_per_ml = Lox_indiv_per_μL * 10^3,
         Pau_indiv_per_ml = Pau_indiv_per_μL * 10^3,
         Pca_indiv_per_ml = Pca_indiv_per_μL * 10^3,
         Spi_indiv_per_ml = Spi_indiv_per_μL * 10^3,
         Spi_te_indiv_per_ml = Spi_te_indiv_per_μL * 10^3,
         Tet_indiv_per_ml = Tet_indiv_per_μL * 10^3)
# --- TAKE OFF PROBLEMATIC VIDEOS --- #
# Set up parameters
ds_ecosystems_before_taking_off_videos = ds_ecosystems
# Take off problematic videos.
# BUG FIX: the previous filter used `time_point %in% ... & file %in% ...`,
# which removes every combination of the listed time points and files, not
# only the listed (time_point, file) pairs. anti_join() drops exactly the
# pairs recorded in videos_to_take_off (identical result while that table
# has a single row).
ds_ecosystems = ds_ecosystems %>%
  anti_join(videos_to_take_off,
            by = c("time_point", "file"))
# Test code: the removed rows span exactly as many (culture, time point,
# file) combinations as there are excluded videos.
diff = setdiff(ds_ecosystems_before_taking_off_videos, ds_ecosystems)
expect_equal(nrow(videos_to_take_off),
             nrow(expand.grid(diff$culture_ID, diff$time_point, diff$file) %>% unique()))
# --- TAKE OFF PROBLEMATIC ECOSYSTEMS --- #
# Set up parameters
ds_ecosystems_before_taking_off_cultures = ds_ecosystems
# Take off problematic ecosystems (culture 60 was spilled).
ds_ecosystems = ds_ecosystems %>%
  filter(!culture_ID %in% ecosystems_to_take_off)
# Test code: the removed rows must all belong to the excluded cultures.
expect_equal(setdiff(ds_ecosystems_before_taking_off_cultures,
                     ds_ecosystems) %>%
               pull(culture_ID) %>%
               unique(),
             ecosystems_to_take_off)
# --- AVERAGE VIDEOS --- #
# Average the video replicates of each culture at each time point.
ds_ecosystems = ds_ecosystems %>%
  group_by(across(all_of(columns_ecosystems))) %>%
  # NOTE(review): no "_tot" columns exist at this stage (they are created
  # further down), so the second across() currently matches nothing --
  # confirm whether it is intentional future-proofing.
  summarise(across(contains("_per_ml"), mean),
            across(contains("_tot"), mean)) %>%
  ungroup()
# Test code: one row per remaining culture per time point.
expect_equal(nrow(ds_ecosystems),
             (n_cultures - length(ecosystems_to_take_off)) * length(time_points))
# --- ADD CONNECTION AND INDIVIDUALS --- #
# Total density = sum of all species densities. rowSums() over the species
# columns replaces the previous eval/parse-style construction of a long
# "+" expression (!!rlang::parse_expr(paste(...))), with identical NA
# propagation (na.rm stays FALSE, matching `+`).
ds_ecosystems = ds_ecosystems %>%
  mutate(indiv_per_ml = rowSums(across(all_of(protist_species_indiv_per_ml))))
# --- CALCULATE TOTAL RESPONSE VARIABLE FOR THE WHOLE ECOSYSTEM --- #
# Whole-ecosystem totals = per-ml values times the ecosystem volume. The
# eleven per-species products are generated with across(); the .names glue
# turns "X_indiv_per_ml" into "X_tot_indiv", matching the former explicit
# column-by-column version.
ds_ecosystems = ds_ecosystems %>%
  mutate(bioarea_tot_mm2 = bioarea_mm2_per_ml * ecosystem_size_ml,
         indiv_tot = indiv_per_ml * ecosystem_size_ml,
         across(.cols = all_of(protist_species_indiv_per_ml),
                .fns = ~ .x * ecosystem_size_ml,
                .names = "{sub('_indiv_per_ml', '_tot_indiv', .col)}"))
# --- CALCULATE SPECIES DOMINANCE --- #
# Dominance of a species = its share (%) of the total density. Where
# indiv_per_ml is 0 the division yields NaN (asserted below).
# FIX: use the documented glue token "{.col}" in .names (the previous
# "{col}" is not the documented across() placeholder) and a single lambda
# instead of a one-element .fns list; the resulting column names
# ("X_indiv_per_ml_dominance") are unchanged.
ds_ecosystems = ds_ecosystems %>%
  mutate(across(.cols = all_of(protist_species_indiv_per_ml),
                .fns = ~ (.x / indiv_per_ml) * 100,
                .names = "{.col}_dominance"))
# Test code
expect_equal(unique(ds_ecosystems$Ble_indiv_per_ml_dominance[ds_ecosystems$indiv_per_ml == 0]), NaN)
if (FALSE %in% unique((ds_ecosystems$Ble_indiv_per_ml/ds_ecosystems$indiv_per_ml) *100 == ds_ecosystems$Ble_indiv_per_ml_dominance)) stop()
# --- CALCULATE ALPHA DIVERSITY (SHANNON, SIMPSON, INVERSE SIMPSON, EVENNESS) --- #
# Set up parameters for testing
n_rows_ds_ecosystems_before_calculating_alpha = nrow(ds_ecosystems)
# Calculate alpha diversity with a project helper defined elsewhere;
# NOTE(review): it takes no arguments, so it presumably reads and returns
# ds_ecosystems from the calling environment -- confirm in its definition.
ds_ecosystems = calculate.alpha.diversity()
# Test code: richness is capped by the species pool and no rows were added
# or lost.
expect_equal(max(ds_ecosystems$species_richness),
             length(protist_species))
expect_equal(nrow(ds_ecosystems),
             n_rows_ds_ecosystems_before_calculating_alpha)
# --- CALCULATE MEDIAN BODY SIZE --- #
# Set up parameters for testing
n_rows_ds_ecosystems_before_median_size = nrow(ds_ecosystems)
# Median body size per video, then averaged over a culture's videos at each
# time point.
ds_median_body_size = ds_individuals %>%
  group_by(time_point,
           culture_ID,
           file) %>%
  summarise(median_body_area_µm2 = median(body_area_µm2)) %>%
  group_by(time_point,
           culture_ID) %>%
  summarise(median_body_area_µm2 = mean(median_body_area_µm2))
# Test code
expect_true(nrow(ds_median_body_size) <= nrow(ds_ecosystems)) #Ds median body size could be less because some cultures might be crashed and not have any individual.
# Set up parameters for testing
ds_ecosystems_before_full_join = ds_ecosystems
# Join the median body size with ds_ecosystems (no `by` argument: the join
# matches on the shared columns time_point and culture_ID).
ds_ecosystems = full_join(ds_ecosystems, ds_median_body_size)
# Test code
expect_equal(nrow(ds_ecosystems),
             n_rows_ds_ecosystems_before_median_size)
# --- CALCULATE AUTO/HETEROTROPHIC RATIO --- #
# Density of the photosynthesising species (Eug, Eup) over the summed
# density of the remaining nine species. rowSums() over the non-Eug/Eup
# species columns replaces the former explicit nine-term sum; NA densities
# propagate into the ratio exactly as before.
ds_ecosystems = ds_ecosystems %>%
  mutate(auto_hetero_ratio =
           (Eug_indiv_per_ml + Eup_indiv_per_ml) /
           rowSums(across(all_of(setdiff(protist_species_indiv_per_ml,
                                         c("Eug_indiv_per_ml",
                                           "Eup_indiv_per_ml"))))))
# --- ADD EVAPORATION RATES --- #
# Manipulate evaporation rates data: pivot to one row per culture per time
# point with the volume of water added to compensate evaporation.
ds_for_evaporation = read.csv(here("1_data",
                                   "evaporation",
                                   "water_addition.csv")) %>%
  pivot_longer(cols = starts_with("water_add_after_t"),
               names_to = "time_point",
               values_to = "water_addition_ml") %>%
  # Water added after t_i affects the community measured at t_(i+1).
  mutate(time_point = as.double(str_extract(time_point, "\\d+")) + 1)
# Add evaporation rates to ds_ecosystems (no `by`: joined on the columns
# the two data frames share).
ds_ecosystems = ds_ecosystems %>%
  left_join(ds_for_evaporation)
# --- ADD TRANSFORMED VARIABLES --- #
# Square-root transforms used later for modelling; plain base-R column
# assignment appends the two new columns in the same order as before.
ds_ecosystems$sqrt_bioarea_mm2_per_ml = sqrt(ds_ecosystems$bioarea_mm2_per_ml)
ds_ecosystems$sqrt_auto_hetero_ratio = sqrt(ds_ecosystems$auto_hetero_ratio)
In this dataset (ds_ecosystems_effect_size) each row represents a treatment
at a time point. It contains the effect size of connecting an ecosystem
(connected vs unconnected).
# --- CALCULATE THE MEAN & SD OF RESPONSE VARIABLES FOR EACH TREATMENT/CONTROL AT EACH TIME POINT --- #
# Grouping/join keys: every ecosystem column except the replicate identifiers.
effect_size_keys = columns_ecosystems[!columns_ecosystems %in% c("culture_ID",
                                                                 "system_nr")]
# For each response variable compute its mean, sd and sample size per
# treatment x time point (t0 excluded, NAs dropped). lapply() builds the
# per-variable summaries directly instead of growing a list with a counter.
ds_ecosystems_effect_size = lapply(variables_ecosystems, function(variable_i) {
  ds_ecosystems %>%
    filter(time_point >= 1,
           !is.na(!!sym(variable_i))) %>%
    group_by(across(all_of(effect_size_keys))) %>%
    summarise(across(all_of(variable_i),
                     list(mean = mean,
                          sd = sd)),
              sample_size = n(),
              .groups = "drop") %>%
    rename_with( ~ paste0(variable_i, "_sample_size"),
                 matches("sample_size"))
})
# Merge the per-variable summaries into a single dataframe on the shared keys.
ds_ecosystems_effect_size <- reduce(ds_ecosystems_effect_size,
                                    full_join,
                                    by = effect_size_keys)
# Test code: one row per ecosystem type x post-t0 time point x disturbance level
expect_equal(nrow(ds_ecosystems_effect_size),
             n_ecosystem_types * (n_time_points - 1) * n_disturbance_levels)
# --- CALCULATE THE EFFECT SIZE (HEDGE'S D) FOR EACH TREATMENT AT EACH TIME POINT --- #
# Initialise one _d, _d_upper and _d_lower column per response variable with
# numeric NA (NA_real_), so the assignments below do not rely on silent
# logical -> numeric coercion.
for (variable_i in variables_ecosystems) {
  ds_ecosystems_effect_size <- ds_ecosystems_effect_size %>%
    mutate(!!paste0(variable_i, "_d") := NA_real_,
           !!paste0(variable_i, "_d_upper") := NA_real_,
           !!paste0(variable_i, "_d_lower") := NA_real_)
}
# For every treatment x time point, find the summary row of the treatment and
# of its paired control, then fill in Hedge's d and its CI for each response
# variable. Time points without matching rows (e.g. t0, excluded above) leave
# the NA placeholders untouched. (The unused row counter of the original loop
# was removed.)
for (treatment_selected in treatments_and_controls$treatment) {
  for (time_point_selected in time_points) {
    # Control paired with this treatment, defined in treatments_and_controls
    control_input = treatments_and_controls$control[treatments_and_controls$treatment == treatment_selected]
    treatment_row = ds_ecosystems_effect_size %>%
      filter(ecosystem_type == treatment_selected,
             time_point == time_point_selected)
    control_row = ds_ecosystems_effect_size %>%
      filter(ecosystem_type == control_input,
             time_point == time_point_selected)
    for (response_variable in variables_ecosystems) {
      # calculate.hedges_d() is a project helper returning a list with
      # elements d, upper_CI and lower_CI.
      hedges_d = calculate.hedges_d(treatment_row[[paste0(response_variable, "_mean")]],
                                    treatment_row[[paste0(response_variable, "_sd")]],
                                    treatment_row[[paste0(response_variable, "_sample_size")]],
                                    control_row[[paste0(response_variable, "_mean")]],
                                    control_row[[paste0(response_variable, "_sd")]],
                                    control_row[[paste0(response_variable, "_sample_size")]])
      ds_ecosystems_effect_size[[paste0(response_variable, "_d")]][
        ds_ecosystems_effect_size$ecosystem_type == treatment_selected &
          ds_ecosystems_effect_size$time_point == time_point_selected] =
        hedges_d$d
      ds_ecosystems_effect_size[[paste0(response_variable, "_d_upper")]][
        ds_ecosystems_effect_size$ecosystem_type == treatment_selected &
          ds_ecosystems_effect_size$time_point == time_point_selected] =
        hedges_d$upper_CI
      ds_ecosystems_effect_size[[paste0(response_variable, "_d_lower")]][
        ds_ecosystems_effect_size$ecosystem_type == treatment_selected &
          ds_ecosystems_effect_size$time_point == time_point_selected] =
        hedges_d$lower_CI
    }
  }
}
# Test code: the effect-size calculation must not change the row count
expect_equal(nrow(ds_ecosystems_effect_size),
             n_ecosystem_types * (n_time_points - 1) * n_disturbance_levels)
In this dataset (ds_metaecosystems) each row represents a meta-ecosystem or a
two-ecosystem unconnected system at a time point.
# --- FIND IDS OF UNCONNECTED ECOSYSTEMS --- #
# Helper: culture IDs of all unconnected ecosystems of a given size class and
# disturbance level. Replaces six copy-pasted pipelines.
pull.unconnected.IDs = function(ecosystem_type_input,
                                disturbance_input) {
  ds_ecosystems %>%
    filter(ecosystem_type == ecosystem_type_input,
           disturbance == disturbance_input) %>%
    pull(culture_ID) %>%
    unique()
}
ID_unconnected_S_low = pull.unconnected.IDs("Small unconnected", "low")
ID_unconnected_M_low = pull.unconnected.IDs("Medium unconnected", "low")
ID_unconnected_L_low = pull.unconnected.IDs("Large unconnected", "low")
ID_unconnected_S_high = pull.unconnected.IDs("Small unconnected", "high")
ID_unconnected_M_high = pull.unconnected.IDs("Medium unconnected", "high")
ID_unconnected_L_high = pull.unconnected.IDs("Large unconnected", "high")
# --- FIND COMBINATIONS OF ECOSYSTEMS TO CREATE UNCONNECTED META-ECOSYSTEMS --- #
# Helper: the full cross of small x large ecosystem IDs for one disturbance
# level, labelled as an unconnected Small-Large meta-ecosystem.
make.SL.combinations = function(ID_small, ID_large, disturbance_input) {
  crossing(ID_first_ecosystem = ID_small,
           ID_second_ecosystem = ID_large) %>%
    mutate(disturbance = disturbance_input,
           metaecosystem_type = "Small-Large",
           connection = "unconnected") %>%
    select(disturbance,
           metaecosystem_type,
           connection,
           ID_first_ecosystem,
           ID_second_ecosystem)
}
# Helper: all unordered pairs of medium ecosystem IDs for one disturbance
# level, labelled as an unconnected Medium-Medium meta-ecosystem.
make.MM.combinations = function(ID_medium, disturbance_input) {
  combinat::combn(ID_medium,
                  m = 2) %>%
    t() %>%
    as.data.frame() %>%
    rename(ID_first_ecosystem = V1,
           ID_second_ecosystem = V2) %>%
    mutate(disturbance = disturbance_input,
           metaecosystem_type = "Medium-Medium",
           connection = "unconnected") %>%
    select(disturbance,
           metaecosystem_type,
           connection,
           ID_first_ecosystem,
           ID_second_ecosystem)
}
combinations_S_and_L_low = make.SL.combinations(ID_unconnected_S_low,
                                                ID_unconnected_L_low,
                                                "low")
combinations_S_and_L_high = make.SL.combinations(ID_unconnected_S_high,
                                                 ID_unconnected_L_high,
                                                 "high")
combinations_M_and_M_low = make.MM.combinations(ID_unconnected_M_low,
                                                "low")
combinations_M_and_M_high = make.MM.combinations(ID_unconnected_M_high,
                                                 "high")
# --- BIND ECOSYSTEM COMBINATIONS --- #
# Stack the four pair tables and number every unconnected meta-ecosystem
# candidate from 1001 upwards (connected systems use lower system numbers).
combinations_unconnected_metaeco = rbind(combinations_S_and_L_low,
                                         combinations_S_and_L_high,
                                         combinations_M_and_M_low,
                                         combinations_M_and_M_high) %>%
  mutate(system_nr = 1000L + row_number()) %>%
  select(system_nr,
         disturbance,
         metaecosystem_type,
         connection,
         ID_first_ecosystem,
         ID_second_ecosystem)
# --- FIND COMBINATIONS OF ECOSYSTEMS TO CREATE CONNECTED META-ECOSYSTEMS --- #
# Connected meta-ecosystems already exist in ds_ecosystems (metaecosystem ==
# "yes"); recover the two member culture_IDs of each system from their mean.
# NOTE(review): mean(culture_ID) -/+ 0.5 recovers the two IDs only if the two
# ecosystems of each connected system have consecutive integer culture_IDs —
# confirm this holds in the ID scheme.
combinations_connected_metaeco = ds_ecosystems %>%
filter(time_point == 0,
metaecosystem == "yes") %>%
select(system_nr,
disturbance,
metaecosystem_type,
culture_ID) %>%
group_by(system_nr,
disturbance,
metaecosystem_type) %>%
summarise(ID_first_ecosystem = (mean(culture_ID) - 0.5),
ID_second_ecosystem = (mean(culture_ID) + 0.5)) %>%
mutate(connection = "connected") %>%
as.data.frame()
# --- BIND COMBINATIONS OF ECOSYSTEMS TO CREATE UNCONNECTED & CONNECTED META-ECOSYSTEMS --- #
# One table of all candidate meta-ecosystems; ecosystems_combined is an
# "ID1|ID2" label used downstream for matching.
ecos_combin = rbind(combinations_unconnected_metaeco,
                    combinations_connected_metaeco) %>%
  mutate(ecosystems_combined = paste(ID_first_ecosystem,
                                     ID_second_ecosystem,
                                     sep = "|"))
n_ecosystems_combinations = nrow(ecos_combin)
# --- CREATE SETS FOR SMALL-LARGE UNCONNECTED META-ECOSYSTEMS --- #
# Create sets for Small-Large unconnected meta-ecosystems where in each set a
# small and a large ecosystem are paired differently (keep the small
# ecosystems in the same order and permute the large ecosystems).
SL_unconn_ecos_comb_sets <- vector("list",
                                   length(disturbance_levels))
for (disturbance_i in seq_along(disturbance_levels)) {
  ID_small_ecosystems = ds_ecosystems %>%
    filter(disturbance == disturbance_levels[disturbance_i],
           ecosystem_type == "Small unconnected") %>%
    pull(culture_ID) %>%
    unique()
  ID_large_ecosystems = ds_ecosystems %>%
    filter(disturbance == disturbance_levels[disturbance_i],
           ecosystem_type == "Large unconnected") %>%
    pull(culture_ID) %>%
    unique()
  # Force the small and large ID vectors to the same length by padding the
  # shorter one with the placeholder "Patch taken off" (filtered out again
  # below). The padding coerces the IDs to character; they are converted back
  # to numeric after the placeholder rows are dropped.
  length_difference <- length(ID_small_ecosystems) - length(ID_large_ecosystems)
  if (length_difference > 0) {
    ID_large_ecosystems = c(ID_large_ecosystems,
                            rep("Patch taken off",
                                times = length_difference))
  } else if (length_difference < 0) {
    ID_small_ecosystems = c(ID_small_ecosystems,
                            rep("Patch taken off",
                                times = -length_difference))
  }
  # One set per permutation of the large ecosystems: within a set every small
  # ecosystem is paired with exactly one large ecosystem.
  permutations_large = permn(ID_large_ecosystems)
  SL_unconn_ecos_comb_sets[[disturbance_i]] = data.frame(
    disturbance = disturbance_levels[disturbance_i],
    metaecosystem_type = "Small-Large",
    connection = "unconnected",
    ID_first_ecosystem = rep(ID_small_ecosystems,
                             times = length(permutations_large)),
    ID_second_ecosystem = unlist(permutations_large),
    set = rep(seq_along(permutations_large),
              each = length(ID_small_ecosystems)))
  expect_equal(nrow(SL_unconn_ecos_comb_sets[[disturbance_i]]),
               length(ID_small_ecosystems) * length(permutations_large))
  # Drop placeholder pairings, restore numeric IDs, and attach system_nr &
  # ecosystems_combined via a natural join with ecos_combin.
  SL_unconn_ecos_comb_sets[[disturbance_i]] = SL_unconn_ecos_comb_sets[[disturbance_i]] %>%
    filter(!ID_first_ecosystem == "Patch taken off",
           !ID_second_ecosystem == "Patch taken off") %>%
    mutate(ID_first_ecosystem = as.double(ID_first_ecosystem),
           ID_second_ecosystem = as.double(ID_second_ecosystem)) %>%
    full_join(ecos_combin %>%
                filter(disturbance == disturbance_levels[disturbance_i],
                       metaecosystem_type == "Small-Large",
                       connection == "unconnected")) #Add system_nr & ecosystems_combined
}
# Keep the per-disturbance list for the row-count test, then stack it.
SL_unconn_ecos_comb_sets_before_binding = SL_unconn_ecos_comb_sets
SL_unconn_ecos_comb_sets = SL_unconn_ecos_comb_sets %>%
  bind_rows()
# --- TEST CODE --- #
# Binding must preserve the total row count; written as a sum over the list
# so the test holds for any number of disturbance levels (not just two).
expect_equal(nrow(SL_unconn_ecos_comb_sets),
             sum(vapply(SL_unconn_ecos_comb_sets_before_binding,
                        nrow,
                        integer(1))))
# Every Small-Large unconnected system_nr from ecos_combin must appear in the sets.
expect_equal(length(SL_unconn_ecos_comb_sets %>%
                      pull(system_nr) %>%
                      unique()),
             length(ecos_combin %>%
                      filter(metaecosystem_type == "Small-Large",
                             connection == "unconnected") %>%
                      pull(system_nr) %>%
                      unique()))
# --- CREATE SETS FOR MEDIUM-MEDIUM UNCONNECTED META-ECOSYSTEMS --- #
# Create sets for Medium-Medium unconnected meta-ecosystems where in each set two different medium ecosystems are paired. To do so, initialise MM_unconn_ecos_comb_sets: one matrix per disturbance level. Assign 10^4 rows to each matrix so that we have enough rows not to run out of them when we try to assign values to them. Assign 4 columns which will include culture_ID of the first system, second culture_ID of the first system, culture_ID of the second system, and second culture_ID of the second system.
MM_unconn_ecos_comb_sets = NULL
for(disturbance_i in 1:length(disturbance_levels)){
MM_unconn_ecos_comb_sets[[disturbance_i]] <- matrix(nrow = 10 ^ 4,
ncol = 4)
}
for (disturbance_i in 1:length(disturbance_levels)) {
# All unconnected medium ecosystems at this disturbance level
ID_medium_ecosystems = ds_ecosystems %>%
filter(disturbance == disturbance_levels[disturbance_i],
ecosystem_type == "Medium unconnected") %>%
pull(culture_ID) %>%
unique()
# All unordered pairs of medium ecosystems: one row per candidate two-ecosystem system.
# NOTE(review): this uses utils::combn while the pair construction above used
# combinat::combn — consider using one of the two consistently.
MM_unconnected_systems = combn(ID_medium_ecosystems,
2) %>%
t()
matrix_row = 0
# Pair every candidate system with every other candidate system that shares no
# ecosystem, so the four culture_IDs of a set are all distinct. The ordered
# double loop enumerates each unordered set of two systems twice (A,B and B,A).
for (first_system_i in 1:nrow(MM_unconnected_systems)) {
#Find culture IDs of the first system (the first ecosystem pair of the set)
first_system = MM_unconnected_systems[first_system_i, ]
for (second_system_i in 1:nrow(MM_unconnected_systems)) {
#Find culture IDs of the second system (the second ecosystem pair of the set)
second_system = MM_unconnected_systems[second_system_i, ]
shared_elements_among_systems = intersect(first_system,
second_system)
if (length(shared_elements_among_systems) == 0) {
matrix_row = matrix_row + 1
#Make first and second system into a set
MM_unconn_ecos_comb_sets[[disturbance_i]][matrix_row,] = c(first_system,
second_system)
# Echo the set into the knitted output (see the printed lines below)
print(MM_unconn_ecos_comb_sets[[disturbance_i]][matrix_row,])
}
}
}
#Tidy the dataset with all the ecosystem combinations: drop the unused
#preallocated rows, which are still all-NA
MM_unconn_ecos_comb_sets[[disturbance_i]] = MM_unconn_ecos_comb_sets[[disturbance_i]] %>%
as.data.frame() %>%
drop_na()
# Test code
# No set may contain the same ecosystem twice
expect_equal(MM_unconn_ecos_comb_sets[[disturbance_i]] %>%
filter(V1 == V2 |
V1 == V3 |
V1 == V4 |
V2 == V3 |
V2 == V4 |
V3 == V4) %>%
nrow(),
0)
#Reorder the dataset with all the ecosystem combinations: from one row per set
#(four IDs wide) to two rows per set (one row per meta-ecosystem, two IDs each)
MM_unconn_ecos_comb_sets_reordered = data.frame(ID_first_ecosystem = NA,
ID_second_ecosystem = NA,
set = NA)
for (set_input in 1:nrow(MM_unconn_ecos_comb_sets[[disturbance_i]])) {
MM_unconn_ecos_comb_sets_reordered = MM_unconn_ecos_comb_sets_reordered %>%
add_row(ID_first_ecosystem = MM_unconn_ecos_comb_sets[[disturbance_i]][set_input, 1],
ID_second_ecosystem = MM_unconn_ecos_comb_sets[[disturbance_i]][set_input, 2],
set = set_input) %>%
add_row(ID_first_ecosystem = MM_unconn_ecos_comb_sets[[disturbance_i]][set_input, 3],
ID_second_ecosystem = MM_unconn_ecos_comb_sets[[disturbance_i]][set_input, 4],
set = set_input)
}
#Add to a list (the all-NA seed row of the reordered dataframe is dropped here)
MM_unconn_ecos_comb_sets[[disturbance_i]] = MM_unconn_ecos_comb_sets_reordered %>%
drop_na() %>%
mutate(disturbance = disturbance_levels[disturbance_i],
metaecosystem_type = "Medium-Medium",
connection = "unconnected")
#Add system nr (natural join with ecos_combin on the ID columns)
ID_combinations_MM_unconnected = ecos_combin %>%
filter(disturbance == disturbance_levels[disturbance_i],
metaecosystem_type == "Medium-Medium",
connection == "unconnected")
MM_unconn_ecos_comb_sets[[disturbance_i]] = full_join(MM_unconn_ecos_comb_sets[[disturbance_i]],
ID_combinations_MM_unconnected)
}
## [1] 6 7 8 9
## [1] 6 7 8 10
## [1] 6 7 9 10
## [1] 6 8 7 9
## [1] 6 8 7 10
## [1] 6 8 9 10
## [1] 6 9 7 8
## [1] 6 9 7 10
## [1] 6 9 8 10
## [1] 6 10 7 8
## [1] 6 10 7 9
## [1] 6 10 8 9
## [1] 7 8 6 9
## [1] 7 8 6 10
## [1] 7 8 9 10
## [1] 7 9 6 8
## [1] 7 9 6 10
## [1] 7 9 8 10
## [1] 7 10 6 8
## [1] 7 10 6 9
## [1] 7 10 8 9
## [1] 8 9 6 7
## [1] 8 9 6 10
## [1] 8 9 7 10
## [1] 8 10 6 7
## [1] 8 10 6 9
## [1] 8 10 7 9
## [1] 9 10 6 7
## [1] 9 10 6 8
## [1] 9 10 7 8
## [1] 61 62 63 64
## [1] 61 62 63 65
## [1] 61 62 64 65
## [1] 61 63 62 64
## [1] 61 63 62 65
## [1] 61 63 64 65
## [1] 61 64 62 63
## [1] 61 64 62 65
## [1] 61 64 63 65
## [1] 61 65 62 63
## [1] 61 65 62 64
## [1] 61 65 63 64
## [1] 62 63 61 64
## [1] 62 63 61 65
## [1] 62 63 64 65
## [1] 62 64 61 63
## [1] 62 64 61 65
## [1] 62 64 63 65
## [1] 62 65 61 63
## [1] 62 65 61 64
## [1] 62 65 63 64
## [1] 63 64 61 62
## [1] 63 64 61 65
## [1] 63 64 62 65
## [1] 63 65 61 62
## [1] 63 65 61 64
## [1] 63 65 62 64
## [1] 64 65 61 62
## [1] 64 65 61 63
## [1] 64 65 62 63
# Bind all sets of MM unconnected into one dataframe
MM_unconn_ecos_comb_sets = bind_rows(MM_unconn_ecos_comb_sets)
# --- TEST CODE --- #
# Every Medium-Medium unconnected system_nr from ecos_combin must appear in the sets.
expect_equal(MM_unconn_ecos_comb_sets %>%
               pull(system_nr) %>%
               unique() %>%
               length(),
             ecos_combin %>%
               filter(metaecosystem_type == "Medium-Medium",
                      connection == "unconnected") %>%
               pull(system_nr) %>%
               unique() %>%
               length())
# --- BIND SL AND MM UNCONNECTED META-ECOSYSTEMS SETS --- #
# Stack both set tables and fix the column order.
ecos_combin_unconn_sets = bind_rows(SL_unconn_ecos_comb_sets,
                                    MM_unconn_ecos_comb_sets) %>%
  select(disturbance,
         metaecosystem_type,
         connection,
         set,
         system_nr,
         ID_first_ecosystem,
         ID_second_ecosystem)
# --- FIND SETS OF SETS --- #
# Helper: number of ecosystem-pairing sets for one meta-ecosystem type at one
# disturbance level. Sets are numbered 1..n, so the maximum is the count.
# Replaces four copy-pasted pipelines.
count.sets = function(metaecosystem_type_input, disturbance_input) {
  ecos_combin_unconn_sets %>%
    filter(metaecosystem_type == metaecosystem_type_input,
           disturbance == disturbance_input) %>%
    pull(set) %>%
    max()
}
n_SL_low_sets = count.sets("Small-Large", "low")
n_SL_high_sets = count.sets("Small-Large", "high")
n_MM_low_sets = count.sets("Medium-Medium", "low")
n_MM_high_sets = count.sets("Medium-Medium", "high")
# Find combinations of sets: every way of choosing one SL-low, one SL-high,
# one MM-low and one MM-high set. print() keeps the table in the knitted output.
ecos_combin_unconn_sets_of_sets <- expand.grid(set_SL_low = 1:n_SL_low_sets,
                                               set_SL_high = 1:n_SL_high_sets,
                                               set_MM_low = 1:n_MM_low_sets,
                                               set_MM_high = 1:n_MM_high_sets) %>%
  print()
## set_SL_low set_SL_high set_MM_low set_MM_high
## 1 1 1 1 1
## 2 2 1 1 1
## 3 3 1 1 1
## 4 4 1 1 1
## 5 5 1 1 1
## 6 6 1 1 1
## 7 7 1 1 1
## 8 8 1 1 1
## 9 9 1 1 1
## 10 10 1 1 1
## 11 11 1 1 1
## 12 12 1 1 1
## 13 13 1 1 1
## 14 14 1 1 1
## 15 15 1 1 1
## 16 16 1 1 1
## 17 17 1 1 1
## 18 18 1 1 1
## 19 19 1 1 1
## 20 20 1 1 1
## 21 21 1 1 1
## 22 22 1 1 1
## 23 23 1 1 1
## 24 24 1 1 1
## 25 25 1 1 1
## 26 26 1 1 1
## 27 27 1 1 1
## 28 28 1 1 1
## 29 29 1 1 1
## 30 30 1 1 1
## 31 31 1 1 1
## 32 32 1 1 1
## 33 33 1 1 1
## 34 34 1 1 1
## 35 35 1 1 1
## 36 36 1 1 1
## 37 37 1 1 1
## 38 38 1 1 1
## 39 39 1 1 1
## 40 40 1 1 1
## 41 41 1 1 1
## 42 42 1 1 1
## 43 43 1 1 1
## 44 44 1 1 1
## 45 45 1 1 1
## 46 46 1 1 1
## 47 47 1 1 1
## 48 48 1 1 1
## 49 49 1 1 1
## 50 50 1 1 1
## 51 51 1 1 1
## 52 52 1 1 1
## 53 53 1 1 1
## 54 54 1 1 1
## 55 55 1 1 1
## 56 56 1 1 1
## 57 57 1 1 1
## 58 58 1 1 1
## 59 59 1 1 1
## 60 60 1 1 1
## 61 61 1 1 1
## 62 62 1 1 1
## 63 63 1 1 1
## 64 64 1 1 1
## 65 65 1 1 1
## 66 66 1 1 1
## 67 67 1 1 1
## 68 68 1 1 1
## 69 69 1 1 1
## 70 70 1 1 1
## 71 71 1 1 1
## 72 72 1 1 1
## 73 73 1 1 1
## 74 74 1 1 1
## 75 75 1 1 1
## 76 76 1 1 1
## 77 77 1 1 1
## 78 78 1 1 1
## 79 79 1 1 1
## 80 80 1 1 1
## 81 81 1 1 1
## 82 82 1 1 1
## 83 83 1 1 1
## 84 84 1 1 1
## 85 85 1 1 1
## 86 86 1 1 1
## 87 87 1 1 1
## 88 88 1 1 1
## 89 89 1 1 1
## 90 90 1 1 1
## 91 91 1 1 1
## 92 92 1 1 1
## 93 93 1 1 1
## 94 94 1 1 1
## 95 95 1 1 1
## 96 96 1 1 1
## 97 97 1 1 1
## 98 98 1 1 1
## 99 99 1 1 1
## 100 100 1 1 1
## 101 101 1 1 1
## 102 102 1 1 1
## 103 103 1 1 1
## 104 104 1 1 1
## 105 105 1 1 1
## 106 106 1 1 1
## 107 107 1 1 1
## 108 108 1 1 1
## 109 109 1 1 1
## 110 110 1 1 1
## 111 111 1 1 1
## 112 112 1 1 1
## 113 113 1 1 1
## 114 114 1 1 1
## 115 115 1 1 1
## 116 116 1 1 1
## 117 117 1 1 1
## 118 118 1 1 1
## 119 119 1 1 1
## 120 120 1 1 1
## 121 1 2 1 1
## 122 2 2 1 1
## 123 3 2 1 1
## 124 4 2 1 1
## 125 5 2 1 1
## 126 6 2 1 1
## 127 7 2 1 1
## 128 8 2 1 1
## 129 9 2 1 1
## 130 10 2 1 1
## 131 11 2 1 1
## 132 12 2 1 1
## 133 13 2 1 1
## 134 14 2 1 1
## 135 15 2 1 1
## 136 16 2 1 1
## 137 17 2 1 1
## 138 18 2 1 1
## 139 19 2 1 1
## 140 20 2 1 1
## 141 21 2 1 1
## 142 22 2 1 1
## 143 23 2 1 1
## 144 24 2 1 1
## 145 25 2 1 1
## 146 26 2 1 1
## 147 27 2 1 1
## 148 28 2 1 1
## 149 29 2 1 1
## 150 30 2 1 1
## 151 31 2 1 1
## 152 32 2 1 1
## 153 33 2 1 1
## 154 34 2 1 1
## 155 35 2 1 1
## 156 36 2 1 1
## 157 37 2 1 1
## 158 38 2 1 1
## 159 39 2 1 1
## 160 40 2 1 1
## 161 41 2 1 1
## 162 42 2 1 1
## 163 43 2 1 1
## 164 44 2 1 1
## 165 45 2 1 1
## 166 46 2 1 1
## 167 47 2 1 1
## 168 48 2 1 1
## 169 49 2 1 1
## 170 50 2 1 1
## 171 51 2 1 1
## 172 52 2 1 1
## 173 53 2 1 1
## 174 54 2 1 1
## 175 55 2 1 1
## 176 56 2 1 1
## 177 57 2 1 1
## 178 58 2 1 1
## 179 59 2 1 1
## 180 60 2 1 1
## 181 61 2 1 1
## 182 62 2 1 1
## 183 63 2 1 1
## 184 64 2 1 1
## 185 65 2 1 1
## 186 66 2 1 1
## 187 67 2 1 1
## 188 68 2 1 1
## 189 69 2 1 1
## 190 70 2 1 1
## 191 71 2 1 1
## 192 72 2 1 1
## 193 73 2 1 1
## 194 74 2 1 1
## 195 75 2 1 1
## 196 76 2 1 1
## 197 77 2 1 1
## 198 78 2 1 1
## 199 79 2 1 1
## 200 80 2 1 1
## 201 81 2 1 1
## 202 82 2 1 1
## 203 83 2 1 1
## 204 84 2 1 1
## 205 85 2 1 1
## 206 86 2 1 1
## 207 87 2 1 1
## 208 88 2 1 1
## 209 89 2 1 1
## 210 90 2 1 1
## 211 91 2 1 1
## 212 92 2 1 1
## 213 93 2 1 1
## 214 94 2 1 1
## 215 95 2 1 1
## 216 96 2 1 1
## 217 97 2 1 1
## 218 98 2 1 1
## 219 99 2 1 1
## 220 100 2 1 1
## 221 101 2 1 1
## 222 102 2 1 1
## 223 103 2 1 1
## 224 104 2 1 1
## 225 105 2 1 1
## 226 106 2 1 1
## 227 107 2 1 1
## 228 108 2 1 1
## 229 109 2 1 1
## 230 110 2 1 1
## 231 111 2 1 1
## 232 112 2 1 1
## 233 113 2 1 1
## 234 114 2 1 1
## 235 115 2 1 1
## 236 116 2 1 1
## 237 117 2 1 1
## 238 118 2 1 1
## 239 119 2 1 1
## 240 120 2 1 1
## 241 1 3 1 1
## 242 2 3 1 1
## 243 3 3 1 1
## 244 4 3 1 1
## 245 5 3 1 1
## 246 6 3 1 1
## 247 7 3 1 1
## 248 8 3 1 1
## 249 9 3 1 1
## 250 10 3 1 1
## 251 11 3 1 1
## 252 12 3 1 1
## 253 13 3 1 1
## 254 14 3 1 1
## 255 15 3 1 1
## 256 16 3 1 1
## 257 17 3 1 1
## 258 18 3 1 1
## 259 19 3 1 1
## 260 20 3 1 1
## 261 21 3 1 1
## 262 22 3 1 1
## 263 23 3 1 1
## 264 24 3 1 1
## 265 25 3 1 1
## 266 26 3 1 1
## 267 27 3 1 1
## 268 28 3 1 1
## 269 29 3 1 1
## 270 30 3 1 1
## 271 31 3 1 1
## 272 32 3 1 1
## 273 33 3 1 1
## 274 34 3 1 1
## 275 35 3 1 1
## 276 36 3 1 1
## 277 37 3 1 1
## 278 38 3 1 1
## 279 39 3 1 1
## 280 40 3 1 1
## 281 41 3 1 1
## 282 42 3 1 1
## 283 43 3 1 1
## 284 44 3 1 1
## 285 45 3 1 1
## 286 46 3 1 1
## 287 47 3 1 1
## 288 48 3 1 1
## 289 49 3 1 1
## 290 50 3 1 1
## 291 51 3 1 1
## 292 52 3 1 1
## 293 53 3 1 1
## 294 54 3 1 1
## 295 55 3 1 1
## 296 56 3 1 1
## 297 57 3 1 1
## 298 58 3 1 1
## 299 59 3 1 1
## 300 60 3 1 1
## 301 61 3 1 1
## 302 62 3 1 1
## 303 63 3 1 1
## 304 64 3 1 1
## 305 65 3 1 1
## 306 66 3 1 1
## 307 67 3 1 1
## 308 68 3 1 1
## 309 69 3 1 1
## 310 70 3 1 1
## 311 71 3 1 1
## 312 72 3 1 1
## 313 73 3 1 1
## 314 74 3 1 1
## 315 75 3 1 1
## 316 76 3 1 1
## 317 77 3 1 1
## 318 78 3 1 1
## 319 79 3 1 1
## 320 80 3 1 1
## 321 81 3 1 1
## 322 82 3 1 1
## 323 83 3 1 1
## 324 84 3 1 1
## 325 85 3 1 1
## 326 86 3 1 1
## 327 87 3 1 1
## 328 88 3 1 1
## 329 89 3 1 1
## 330 90 3 1 1
## 331 91 3 1 1
## 332 92 3 1 1
## 333 93 3 1 1
## 334 94 3 1 1
## 335 95 3 1 1
## 336 96 3 1 1
## 337 97 3 1 1
## 338 98 3 1 1
## 339 99 3 1 1
## 340 100 3 1 1
## 341 101 3 1 1
## 342 102 3 1 1
## 343 103 3 1 1
## 344 104 3 1 1
## 345 105 3 1 1
## 346 106 3 1 1
## 347 107 3 1 1
## 348 108 3 1 1
## 349 109 3 1 1
## 350 110 3 1 1
## 351 111 3 1 1
## 352 112 3 1 1
## 353 113 3 1 1
## 354 114 3 1 1
## 355 115 3 1 1
## 356 116 3 1 1
## 357 117 3 1 1
## 358 118 3 1 1
## 359 119 3 1 1
## 360 120 3 1 1
## 361 1 4 1 1
## 362 2 4 1 1
## 363 3 4 1 1
## 364 4 4 1 1
## 365 5 4 1 1
## 366 6 4 1 1
## 367 7 4 1 1
## 368 8 4 1 1
## 369 9 4 1 1
## 370 10 4 1 1
## 371 11 4 1 1
## 372 12 4 1 1
## 373 13 4 1 1
## 374 14 4 1 1
## 375 15 4 1 1
## 376 16 4 1 1
## 377 17 4 1 1
## 378 18 4 1 1
## 379 19 4 1 1
## 380 20 4 1 1
## 381 21 4 1 1
## 382 22 4 1 1
## 383 23 4 1 1
## 384 24 4 1 1
## 385 25 4 1 1
## 386 26 4 1 1
## 387 27 4 1 1
## 388 28 4 1 1
## 389 29 4 1 1
## 390 30 4 1 1
## 391 31 4 1 1
## 392 32 4 1 1
## 393 33 4 1 1
## 394 34 4 1 1
## 395 35 4 1 1
## 396 36 4 1 1
## 397 37 4 1 1
## 398 38 4 1 1
## 399 39 4 1 1
## 400 40 4 1 1
## 401 41 4 1 1
## 402 42 4 1 1
## 403 43 4 1 1
## 404 44 4 1 1
## 405 45 4 1 1
## 406 46 4 1 1
## 407 47 4 1 1
## 408 48 4 1 1
## 409 49 4 1 1
## 410 50 4 1 1
## 411 51 4 1 1
## 412 52 4 1 1
## 413 53 4 1 1
## 414 54 4 1 1
## 415 55 4 1 1
## 416 56 4 1 1
## 417 57 4 1 1
## 418 58 4 1 1
## 419 59 4 1 1
## 420 60 4 1 1
## 421 61 4 1 1
## 422 62 4 1 1
## 423 63 4 1 1
## 424 64 4 1 1
## 425 65 4 1 1
## 426 66 4 1 1
## 427 67 4 1 1
## 428 68 4 1 1
## 429 69 4 1 1
## 430 70 4 1 1
## 431 71 4 1 1
## 432 72 4 1 1
## 433 73 4 1 1
## 434 74 4 1 1
## 435 75 4 1 1
## 436 76 4 1 1
## 437 77 4 1 1
## 438 78 4 1 1
## 439 79 4 1 1
## 440 80 4 1 1
## 441 81 4 1 1
## 442 82 4 1 1
## 443 83 4 1 1
## 444 84 4 1 1
## 445 85 4 1 1
## 446 86 4 1 1
## 447 87 4 1 1
## 448 88 4 1 1
## 449 89 4 1 1
## 450 90 4 1 1
## 451 91 4 1 1
## 452 92 4 1 1
## 453 93 4 1 1
## 454 94 4 1 1
## 455 95 4 1 1
## 456 96 4 1 1
## 457 97 4 1 1
## 458 98 4 1 1
## 459 99 4 1 1
## 460 100 4 1 1
## 461 101 4 1 1
## 462 102 4 1 1
## 463 103 4 1 1
## 464 104 4 1 1
## 465 105 4 1 1
## 466 106 4 1 1
## 467 107 4 1 1
## 468 108 4 1 1
## 469 109 4 1 1
## 470 110 4 1 1
## 471 111 4 1 1
## 472 112 4 1 1
## 473 113 4 1 1
## 474 114 4 1 1
## 475 115 4 1 1
## 476 116 4 1 1
## 477 117 4 1 1
## 478 118 4 1 1
## 479 119 4 1 1
## 480 120 4 1 1
## 481 1 5 1 1
## 482 2 5 1 1
## 483 3 5 1 1
## 484 4 5 1 1
## 485 5 5 1 1
## 486 6 5 1 1
## 487 7 5 1 1
## 488 8 5 1 1
## 489 9 5 1 1
## 490 10 5 1 1
## 491 11 5 1 1
## 492 12 5 1 1
## 493 13 5 1 1
## 494 14 5 1 1
## 495 15 5 1 1
## 496 16 5 1 1
## 497 17 5 1 1
## 498 18 5 1 1
## 499 19 5 1 1
## 500 20 5 1 1
## 501 21 5 1 1
## 502 22 5 1 1
## 503 23 5 1 1
## 504 24 5 1 1
## 505 25 5 1 1
## 506 26 5 1 1
## 507 27 5 1 1
## 508 28 5 1 1
## 509 29 5 1 1
## 510 30 5 1 1
## 511 31 5 1 1
## 512 32 5 1 1
## 513 33 5 1 1
## 514 34 5 1 1
## 515 35 5 1 1
## 516 36 5 1 1
## 517 37 5 1 1
## 518 38 5 1 1
## 519 39 5 1 1
## 520 40 5 1 1
## 521 41 5 1 1
## 522 42 5 1 1
## 523 43 5 1 1
## 524 44 5 1 1
## 525 45 5 1 1
## 526 46 5 1 1
## 527 47 5 1 1
## 528 48 5 1 1
## 529 49 5 1 1
## 530 50 5 1 1
## 531 51 5 1 1
## 532 52 5 1 1
## 533 53 5 1 1
## 534 54 5 1 1
## 535 55 5 1 1
## 536 56 5 1 1
## 537 57 5 1 1
## 538 58 5 1 1
## 539 59 5 1 1
## 540 60 5 1 1
## 541 61 5 1 1
## 542 62 5 1 1
## 543 63 5 1 1
## 544 64 5 1 1
## 545 65 5 1 1
## 546 66 5 1 1
## 547 67 5 1 1
## 548 68 5 1 1
## 549 69 5 1 1
## 550 70 5 1 1
## 551 71 5 1 1
## 552 72 5 1 1
## 553 73 5 1 1
## 554 74 5 1 1
## 555 75 5 1 1
## 556 76 5 1 1
## 557 77 5 1 1
## 558 78 5 1 1
## 559 79 5 1 1
## 560 80 5 1 1
## 561 81 5 1 1
## 562 82 5 1 1
## 563 83 5 1 1
## 564 84 5 1 1
## 565 85 5 1 1
## 566 86 5 1 1
## 567 87 5 1 1
## 568 88 5 1 1
## 569 89 5 1 1
## 570 90 5 1 1
## 571 91 5 1 1
## 572 92 5 1 1
## 573 93 5 1 1
## 574 94 5 1 1
## 575 95 5 1 1
## 576 96 5 1 1
## 577 97 5 1 1
## 578 98 5 1 1
## 579 99 5 1 1
## 580 100 5 1 1
## 581 101 5 1 1
## 582 102 5 1 1
## 583 103 5 1 1
## 584 104 5 1 1
## 585 105 5 1 1
## 586 106 5 1 1
## 587 107 5 1 1
## 588 108 5 1 1
## 589 109 5 1 1
## 590 110 5 1 1
## 591 111 5 1 1
## 592 112 5 1 1
## 593 113 5 1 1
## 594 114 5 1 1
## 595 115 5 1 1
## 596 116 5 1 1
## 597 117 5 1 1
## 598 118 5 1 1
## 599 119 5 1 1
## 600 120 5 1 1
## 601 1 6 1 1
## 602 2 6 1 1
## 603 3 6 1 1
## 604 4 6 1 1
## 605 5 6 1 1
## 606 6 6 1 1
## 607 7 6 1 1
## 608 8 6 1 1
## 609 9 6 1 1
## 610 10 6 1 1
## 611 11 6 1 1
## 612 12 6 1 1
## 613 13 6 1 1
## 614 14 6 1 1
## 615 15 6 1 1
## 616 16 6 1 1
## 617 17 6 1 1
## 618 18 6 1 1
## 619 19 6 1 1
## 620 20 6 1 1
## 621 21 6 1 1
## 622 22 6 1 1
## 623 23 6 1 1
## 624 24 6 1 1
## 625 25 6 1 1
## 626 26 6 1 1
## 627 27 6 1 1
## 628 28 6 1 1
## 629 29 6 1 1
## 630 30 6 1 1
## 631 31 6 1 1
## 632 32 6 1 1
## 633 33 6 1 1
## 634 34 6 1 1
## 635 35 6 1 1
## 636 36 6 1 1
## 637 37 6 1 1
## 638 38 6 1 1
## 639 39 6 1 1
## 640 40 6 1 1
## 641 41 6 1 1
## 642 42 6 1 1
## 643 43 6 1 1
## 644 44 6 1 1
## 645 45 6 1 1
## 646 46 6 1 1
## 647 47 6 1 1
## 648 48 6 1 1
## 649 49 6 1 1
## 650 50 6 1 1
## 651 51 6 1 1
## 652 52 6 1 1
## 653 53 6 1 1
## 654 54 6 1 1
## 655 55 6 1 1
## 656 56 6 1 1
## 657 57 6 1 1
## 658 58 6 1 1
## 659 59 6 1 1
## 660 60 6 1 1
## 661 61 6 1 1
## 662 62 6 1 1
## 663 63 6 1 1
## 664 64 6 1 1
## 665 65 6 1 1
## 666 66 6 1 1
## 667 67 6 1 1
## 668 68 6 1 1
## 669 69 6 1 1
## 670 70 6 1 1
## 671 71 6 1 1
## 672 72 6 1 1
## 673 73 6 1 1
## 674 74 6 1 1
## 675 75 6 1 1
## 676 76 6 1 1
## 677 77 6 1 1
## 678 78 6 1 1
## 679 79 6 1 1
## 680 80 6 1 1
## 681 81 6 1 1
## 682 82 6 1 1
## 683 83 6 1 1
## 684 84 6 1 1
## 685 85 6 1 1
## 686 86 6 1 1
## 687 87 6 1 1
## 688 88 6 1 1
## 689 89 6 1 1
## 690 90 6 1 1
## 691 91 6 1 1
## 692 92 6 1 1
## 693 93 6 1 1
## 694 94 6 1 1
## 695 95 6 1 1
## 696 96 6 1 1
## 697 97 6 1 1
## 698 98 6 1 1
## 699 99 6 1 1
## 700 100 6 1 1
## 701 101 6 1 1
## 702 102 6 1 1
## 703 103 6 1 1
## 704 104 6 1 1
## 705 105 6 1 1
## 706 106 6 1 1
## 707 107 6 1 1
## 708 108 6 1 1
## 709 109 6 1 1
## 710 110 6 1 1
## 711 111 6 1 1
## 712 112 6 1 1
## 713 113 6 1 1
## 714 114 6 1 1
## 715 115 6 1 1
## 716 116 6 1 1
## 717 117 6 1 1
## 718 118 6 1 1
## 719 119 6 1 1
## 720 120 6 1 1
## 721 1 7 1 1
## 722 2 7 1 1
## 723 3 7 1 1
## 724 4 7 1 1
## 725 5 7 1 1
## 726 6 7 1 1
## 727 7 7 1 1
## 728 8 7 1 1
## 729 9 7 1 1
## 730 10 7 1 1
## 731 11 7 1 1
## 732 12 7 1 1
## 733 13 7 1 1
## 734 14 7 1 1
## 735 15 7 1 1
## 736 16 7 1 1
## 737 17 7 1 1
## 738 18 7 1 1
## 739 19 7 1 1
## 740 20 7 1 1
## 741 21 7 1 1
## 742 22 7 1 1
## 743 23 7 1 1
## 744 24 7 1 1
## 745 25 7 1 1
## 746 26 7 1 1
## 747 27 7 1 1
## 748 28 7 1 1
## 749 29 7 1 1
## 750 30 7 1 1
## 751 31 7 1 1
## 752 32 7 1 1
## 753 33 7 1 1
## 754 34 7 1 1
## 755 35 7 1 1
## 756 36 7 1 1
## 757 37 7 1 1
## 758 38 7 1 1
## 759 39 7 1 1
## 760 40 7 1 1
## 761 41 7 1 1
## 762 42 7 1 1
## 763 43 7 1 1
## 764 44 7 1 1
## 765 45 7 1 1
## 766 46 7 1 1
## 767 47 7 1 1
## 768 48 7 1 1
## 769 49 7 1 1
## 770 50 7 1 1
## 771 51 7 1 1
## 772 52 7 1 1
## 773 53 7 1 1
## 774 54 7 1 1
## 775 55 7 1 1
## 776 56 7 1 1
## 777 57 7 1 1
## 778 58 7 1 1
## 779 59 7 1 1
## 780 60 7 1 1
## 781 61 7 1 1
## 782 62 7 1 1
## 783 63 7 1 1
## 784 64 7 1 1
## 785 65 7 1 1
## 786 66 7 1 1
## 787 67 7 1 1
## 788 68 7 1 1
## 789 69 7 1 1
## 790 70 7 1 1
## 791 71 7 1 1
## 792 72 7 1 1
## 793 73 7 1 1
## 794 74 7 1 1
## 795 75 7 1 1
## 796 76 7 1 1
## 797 77 7 1 1
## 798 78 7 1 1
## 799 79 7 1 1
## 800 80 7 1 1
## 801 81 7 1 1
## 802 82 7 1 1
## 803 83 7 1 1
## 804 84 7 1 1
## 805 85 7 1 1
## 806 86 7 1 1
## 807 87 7 1 1
## 808 88 7 1 1
## 809 89 7 1 1
## 810 90 7 1 1
## 811 91 7 1 1
## 812 92 7 1 1
## 813 93 7 1 1
## 814 94 7 1 1
## 815 95 7 1 1
## 816 96 7 1 1
## 817 97 7 1 1
## 818 98 7 1 1
## 819 99 7 1 1
## 820 100 7 1 1
## 821 101 7 1 1
## 822 102 7 1 1
## 823 103 7 1 1
## 824 104 7 1 1
## 825 105 7 1 1
## 826 106 7 1 1
## 827 107 7 1 1
## 828 108 7 1 1
## 829 109 7 1 1
## 830 110 7 1 1
## 831 111 7 1 1
## 832 112 7 1 1
## 833 113 7 1 1
## 834 114 7 1 1
## 835 115 7 1 1
## 836 116 7 1 1
## 837 117 7 1 1
## 838 118 7 1 1
## 839 119 7 1 1
## 840 120 7 1 1
## 841 1 8 1 1
## 842 2 8 1 1
## 843 3 8 1 1
## 844 4 8 1 1
## 845 5 8 1 1
## 846 6 8 1 1
## 847 7 8 1 1
## 848 8 8 1 1
## 849 9 8 1 1
## 850 10 8 1 1
## 851 11 8 1 1
## 852 12 8 1 1
## 853 13 8 1 1
## 854 14 8 1 1
## 855 15 8 1 1
## 856 16 8 1 1
## 857 17 8 1 1
## 858 18 8 1 1
## 859 19 8 1 1
## 860 20 8 1 1
## 861 21 8 1 1
## 862 22 8 1 1
## 863 23 8 1 1
## 864 24 8 1 1
## 865 25 8 1 1
## 866 26 8 1 1
## 867 27 8 1 1
## 868 28 8 1 1
## 869 29 8 1 1
## 870 30 8 1 1
## 871 31 8 1 1
## 872 32 8 1 1
## 873 33 8 1 1
## 874 34 8 1 1
## 875 35 8 1 1
## 876 36 8 1 1
## 877 37 8 1 1
## 878 38 8 1 1
## 879 39 8 1 1
## 880 40 8 1 1
## 881 41 8 1 1
## 882 42 8 1 1
## 883 43 8 1 1
## 884 44 8 1 1
## 885 45 8 1 1
## 886 46 8 1 1
## 887 47 8 1 1
## 888 48 8 1 1
## 889 49 8 1 1
## 890 50 8 1 1
## 891 51 8 1 1
## 892 52 8 1 1
## 893 53 8 1 1
## 894 54 8 1 1
## 895 55 8 1 1
## 896 56 8 1 1
## 897 57 8 1 1
## 898 58 8 1 1
## 899 59 8 1 1
## 900 60 8 1 1
## 901 61 8 1 1
## 902 62 8 1 1
## 903 63 8 1 1
## 904 64 8 1 1
## 905 65 8 1 1
## 906 66 8 1 1
## 907 67 8 1 1
## 908 68 8 1 1
## 909 69 8 1 1
## 910 70 8 1 1
## 911 71 8 1 1
## 912 72 8 1 1
## 913 73 8 1 1
## 914 74 8 1 1
## 915 75 8 1 1
## 916 76 8 1 1
## 917 77 8 1 1
## 918 78 8 1 1
## 919 79 8 1 1
## 920 80 8 1 1
## 921 81 8 1 1
## 922 82 8 1 1
## 923 83 8 1 1
## 924 84 8 1 1
## 925 85 8 1 1
## 926 86 8 1 1
## 927 87 8 1 1
## 928 88 8 1 1
## 929 89 8 1 1
## 930 90 8 1 1
## 931 91 8 1 1
## 932 92 8 1 1
## 933 93 8 1 1
## 934 94 8 1 1
## 935 95 8 1 1
## 936 96 8 1 1
## 937 97 8 1 1
## 938 98 8 1 1
## 939 99 8 1 1
## 940 100 8 1 1
## 941 101 8 1 1
## 942 102 8 1 1
## 943 103 8 1 1
## 944 104 8 1 1
## 945 105 8 1 1
## 946 106 8 1 1
## 947 107 8 1 1
## 948 108 8 1 1
## 949 109 8 1 1
## 950 110 8 1 1
## 951 111 8 1 1
## 952 112 8 1 1
## 953 113 8 1 1
## 954 114 8 1 1
## 955 115 8 1 1
## 956 116 8 1 1
## 957 117 8 1 1
## 958 118 8 1 1
## 959 119 8 1 1
## 960 120 8 1 1
## 961 1 9 1 1
## 962 2 9 1 1
## 963 3 9 1 1
## 964 4 9 1 1
## 965 5 9 1 1
## 966 6 9 1 1
## 967 7 9 1 1
## 968 8 9 1 1
## 969 9 9 1 1
## 970 10 9 1 1
## 971 11 9 1 1
## 972 12 9 1 1
## 973 13 9 1 1
## 974 14 9 1 1
## 975 15 9 1 1
## 976 16 9 1 1
## 977 17 9 1 1
## 978 18 9 1 1
## 979 19 9 1 1
## 980 20 9 1 1
## 981 21 9 1 1
## 982 22 9 1 1
## 983 23 9 1 1
## 984 24 9 1 1
## 985 25 9 1 1
## 986 26 9 1 1
## 987 27 9 1 1
## 988 28 9 1 1
## 989 29 9 1 1
## 990 30 9 1 1
## 991 31 9 1 1
## 992 32 9 1 1
## 993 33 9 1 1
## 994 34 9 1 1
## 995 35 9 1 1
## 996 36 9 1 1
## 997 37 9 1 1
## 998 38 9 1 1
## 999 39 9 1 1
## 1000 40 9 1 1
## 1001 41 9 1 1
## 1002 42 9 1 1
## 1003 43 9 1 1
## 1004 44 9 1 1
## 1005 45 9 1 1
## 1006 46 9 1 1
## 1007 47 9 1 1
## 1008 48 9 1 1
## 1009 49 9 1 1
## 1010 50 9 1 1
## 1011 51 9 1 1
## 1012 52 9 1 1
## 1013 53 9 1 1
## 1014 54 9 1 1
## 1015 55 9 1 1
## 1016 56 9 1 1
## 1017 57 9 1 1
## 1018 58 9 1 1
## 1019 59 9 1 1
## 1020 60 9 1 1
## 1021 61 9 1 1
## 1022 62 9 1 1
## 1023 63 9 1 1
## 1024 64 9 1 1
## 1025 65 9 1 1
## 1026 66 9 1 1
## 1027 67 9 1 1
## 1028 68 9 1 1
## 1029 69 9 1 1
## 1030 70 9 1 1
## 1031 71 9 1 1
## 1032 72 9 1 1
## 1033 73 9 1 1
## 1034 74 9 1 1
## 1035 75 9 1 1
## 1036 76 9 1 1
## 1037 77 9 1 1
## 1038 78 9 1 1
## 1039 79 9 1 1
## 1040 80 9 1 1
## 1041 81 9 1 1
## 1042 82 9 1 1
## 1043 83 9 1 1
## 1044 84 9 1 1
## 1045 85 9 1 1
## 1046 86 9 1 1
## 1047 87 9 1 1
## 1048 88 9 1 1
## 1049 89 9 1 1
## 1050 90 9 1 1
## 1051 91 9 1 1
## 1052 92 9 1 1
## 1053 93 9 1 1
## 1054 94 9 1 1
## 1055 95 9 1 1
## 1056 96 9 1 1
## 1057 97 9 1 1
## 1058 98 9 1 1
## 1059 99 9 1 1
## 1060 100 9 1 1
## 1061 101 9 1 1
## 1062 102 9 1 1
## 1063 103 9 1 1
## 1064 104 9 1 1
## 1065 105 9 1 1
## 1066 106 9 1 1
## 1067 107 9 1 1
## 1068 108 9 1 1
## 1069 109 9 1 1
## 1070 110 9 1 1
## 1071 111 9 1 1
## 1072 112 9 1 1
## 1073 113 9 1 1
## 1074 114 9 1 1
## 1075 115 9 1 1
## 1076 116 9 1 1
## 1077 117 9 1 1
## 1078 118 9 1 1
## 1079 119 9 1 1
## 1080 120 9 1 1
## 1081 1 10 1 1
## 1082 2 10 1 1
## 1083 3 10 1 1
## 1084 4 10 1 1
## 1085 5 10 1 1
## 1086 6 10 1 1
## 1087 7 10 1 1
## 1088 8 10 1 1
## 1089 9 10 1 1
## 1090 10 10 1 1
## 1091 11 10 1 1
## 1092 12 10 1 1
## 1093 13 10 1 1
## 1094 14 10 1 1
## 1095 15 10 1 1
## 1096 16 10 1 1
## 1097 17 10 1 1
## 1098 18 10 1 1
## 1099 19 10 1 1
## 1100 20 10 1 1
## 1101 21 10 1 1
## 1102 22 10 1 1
## 1103 23 10 1 1
## 1104 24 10 1 1
## 1105 25 10 1 1
## 1106 26 10 1 1
## 1107 27 10 1 1
## 1108 28 10 1 1
## 1109 29 10 1 1
## 1110 30 10 1 1
## 1111 31 10 1 1
## 1112 32 10 1 1
## 1113 33 10 1 1
## 1114 34 10 1 1
## 1115 35 10 1 1
## 1116 36 10 1 1
## 1117 37 10 1 1
## 1118 38 10 1 1
## 1119 39 10 1 1
## 1120 40 10 1 1
## 1121 41 10 1 1
## 1122 42 10 1 1
## 1123 43 10 1 1
## 1124 44 10 1 1
## 1125 45 10 1 1
## 1126 46 10 1 1
## 1127 47 10 1 1
## 1128 48 10 1 1
## 1129 49 10 1 1
## 1130 50 10 1 1
## 1131 51 10 1 1
## 1132 52 10 1 1
## 1133 53 10 1 1
## 1134 54 10 1 1
## 1135 55 10 1 1
## 1136 56 10 1 1
## 1137 57 10 1 1
## 1138 58 10 1 1
## 1139 59 10 1 1
## 1140 60 10 1 1
## 1141 61 10 1 1
## 1142 62 10 1 1
## 1143 63 10 1 1
## 1144 64 10 1 1
## 1145 65 10 1 1
## 1146 66 10 1 1
## 1147 67 10 1 1
## 1148 68 10 1 1
## 1149 69 10 1 1
## 1150 70 10 1 1
## 1151 71 10 1 1
## 1152 72 10 1 1
## 1153 73 10 1 1
## 1154 74 10 1 1
## 1155 75 10 1 1
## 1156 76 10 1 1
## 1157 77 10 1 1
## 1158 78 10 1 1
## 1159 79 10 1 1
## 1160 80 10 1 1
## 1161 81 10 1 1
## 1162 82 10 1 1
## 1163 83 10 1 1
## 1164 84 10 1 1
## 1165 85 10 1 1
## 1166 86 10 1 1
## 1167 87 10 1 1
## 1168 88 10 1 1
## 1169 89 10 1 1
## 1170 90 10 1 1
## 1171 91 10 1 1
## 1172 92 10 1 1
## 1173 93 10 1 1
## 1174 94 10 1 1
## 1175 95 10 1 1
## 1176 96 10 1 1
## 1177 97 10 1 1
## 1178 98 10 1 1
## 1179 99 10 1 1
## 1180 100 10 1 1
## 1181 101 10 1 1
## 1182 102 10 1 1
## 1183 103 10 1 1
## 1184 104 10 1 1
## 1185 105 10 1 1
## 1186 106 10 1 1
## 1187 107 10 1 1
## 1188 108 10 1 1
## 1189 109 10 1 1
## 1190 110 10 1 1
## 1191 111 10 1 1
## 1192 112 10 1 1
## 1193 113 10 1 1
## 1194 114 10 1 1
## 1195 115 10 1 1
## 1196 116 10 1 1
## 1197 117 10 1 1
## 1198 118 10 1 1
## 1199 119 10 1 1
## 1200 120 10 1 1
## 1201 1 11 1 1
## 1202 2 11 1 1
## 1203 3 11 1 1
## 1204 4 11 1 1
## 1205 5 11 1 1
## 1206 6 11 1 1
## 1207 7 11 1 1
## 1208 8 11 1 1
## 1209 9 11 1 1
## 1210 10 11 1 1
## 1211 11 11 1 1
## 1212 12 11 1 1
## 1213 13 11 1 1
## 1214 14 11 1 1
## 1215 15 11 1 1
## 1216 16 11 1 1
## 1217 17 11 1 1
## 1218 18 11 1 1
## 1219 19 11 1 1
## 1220 20 11 1 1
## 1221 21 11 1 1
## 1222 22 11 1 1
## 1223 23 11 1 1
## 1224 24 11 1 1
## 1225 25 11 1 1
## 1226 26 11 1 1
## 1227 27 11 1 1
## 1228 28 11 1 1
## 1229 29 11 1 1
## 1230 30 11 1 1
## 1231 31 11 1 1
## 1232 32 11 1 1
## 1233 33 11 1 1
## 1234 34 11 1 1
## 1235 35 11 1 1
## 1236 36 11 1 1
## 1237 37 11 1 1
## 1238 38 11 1 1
## 1239 39 11 1 1
## 1240 40 11 1 1
## 1241 41 11 1 1
## 1242 42 11 1 1
## 1243 43 11 1 1
## 1244 44 11 1 1
## 1245 45 11 1 1
## 1246 46 11 1 1
## 1247 47 11 1 1
## 1248 48 11 1 1
## 1249 49 11 1 1
## 1250 50 11 1 1
## 1251 51 11 1 1
## 1252 52 11 1 1
## 1253 53 11 1 1
## 1254 54 11 1 1
## 1255 55 11 1 1
## 1256 56 11 1 1
## 1257 57 11 1 1
## 1258 58 11 1 1
## 1259 59 11 1 1
## 1260 60 11 1 1
## 1261 61 11 1 1
## 1262 62 11 1 1
## 1263 63 11 1 1
## 1264 64 11 1 1
## 1265 65 11 1 1
## 1266 66 11 1 1
## 1267 67 11 1 1
## 1268 68 11 1 1
## 1269 69 11 1 1
## 1270 70 11 1 1
## 1271 71 11 1 1
## 1272 72 11 1 1
## 1273 73 11 1 1
## 1274 74 11 1 1
## 1275 75 11 1 1
## 1276 76 11 1 1
## 1277 77 11 1 1
## 1278 78 11 1 1
## 1279 79 11 1 1
## 1280 80 11 1 1
## 1281 81 11 1 1
## 1282 82 11 1 1
## 1283 83 11 1 1
## 1284 84 11 1 1
## 1285 85 11 1 1
## 1286 86 11 1 1
## 1287 87 11 1 1
## 1288 88 11 1 1
## 1289 89 11 1 1
## 1290 90 11 1 1
## 1291 91 11 1 1
## 1292 92 11 1 1
## 1293 93 11 1 1
## 1294 94 11 1 1
## 1295 95 11 1 1
## 1296 96 11 1 1
## 1297 97 11 1 1
## 1298 98 11 1 1
## 1299 99 11 1 1
## 1300 100 11 1 1
## 1301 101 11 1 1
## 1302 102 11 1 1
## 1303 103 11 1 1
## 1304 104 11 1 1
## 1305 105 11 1 1
## 1306 106 11 1 1
## 1307 107 11 1 1
## 1308 108 11 1 1
## 1309 109 11 1 1
## 1310 110 11 1 1
## 1311 111 11 1 1
## 1312 112 11 1 1
## 1313 113 11 1 1
## 1314 114 11 1 1
## 1315 115 11 1 1
## 1316 116 11 1 1
## 1317 117 11 1 1
## 1318 118 11 1 1
## 1319 119 11 1 1
## 1320 120 11 1 1
## 1321 1 12 1 1
## 1322 2 12 1 1
## 1323 3 12 1 1
## 1324 4 12 1 1
## 1325 5 12 1 1
## 1326 6 12 1 1
## 1327 7 12 1 1
## 1328 8 12 1 1
## 1329 9 12 1 1
## 1330 10 12 1 1
## 1331 11 12 1 1
## 1332 12 12 1 1
## 1333 13 12 1 1
## 1334 14 12 1 1
## 1335 15 12 1 1
## 1336 16 12 1 1
## 1337 17 12 1 1
## 1338 18 12 1 1
## 1339 19 12 1 1
## 1340 20 12 1 1
## 1341 21 12 1 1
## 1342 22 12 1 1
## 1343 23 12 1 1
## 1344 24 12 1 1
## 1345 25 12 1 1
## 1346 26 12 1 1
## 1347 27 12 1 1
## 1348 28 12 1 1
## 1349 29 12 1 1
## 1350 30 12 1 1
## 1351 31 12 1 1
## 1352 32 12 1 1
## 1353 33 12 1 1
## 1354 34 12 1 1
## 1355 35 12 1 1
## 1356 36 12 1 1
## 1357 37 12 1 1
## 1358 38 12 1 1
## 1359 39 12 1 1
## 1360 40 12 1 1
## 1361 41 12 1 1
## 1362 42 12 1 1
## 1363 43 12 1 1
## 1364 44 12 1 1
## 1365 45 12 1 1
## 1366 46 12 1 1
## 1367 47 12 1 1
## 1368 48 12 1 1
## 1369 49 12 1 1
## 1370 50 12 1 1
## 1371 51 12 1 1
## 1372 52 12 1 1
## 1373 53 12 1 1
## 1374 54 12 1 1
## 1375 55 12 1 1
## 1376 56 12 1 1
## 1377 57 12 1 1
## 1378 58 12 1 1
## 1379 59 12 1 1
## 1380 60 12 1 1
## 1381 61 12 1 1
## 1382 62 12 1 1
## 1383 63 12 1 1
## 1384 64 12 1 1
## 1385 65 12 1 1
## 1386 66 12 1 1
## 1387 67 12 1 1
## 1388 68 12 1 1
## 1389 69 12 1 1
## 1390 70 12 1 1
## 1391 71 12 1 1
## 1392 72 12 1 1
## 1393 73 12 1 1
## 1394 74 12 1 1
## 1395 75 12 1 1
## 1396 76 12 1 1
## 1397 77 12 1 1
## 1398 78 12 1 1
## 1399 79 12 1 1
## 1400 80 12 1 1
## 1401 81 12 1 1
## 1402 82 12 1 1
## 1403 83 12 1 1
## 1404 84 12 1 1
## 1405 85 12 1 1
## 1406 86 12 1 1
## 1407 87 12 1 1
## 1408 88 12 1 1
## 1409 89 12 1 1
## 1410 90 12 1 1
## 1411 91 12 1 1
## 1412 92 12 1 1
## 1413 93 12 1 1
## 1414 94 12 1 1
## 1415 95 12 1 1
## 1416 96 12 1 1
## 1417 97 12 1 1
## 1418 98 12 1 1
## 1419 99 12 1 1
## 1420 100 12 1 1
## 1421 101 12 1 1
## 1422 102 12 1 1
## 1423 103 12 1 1
## 1424 104 12 1 1
## 1425 105 12 1 1
## 1426 106 12 1 1
## 1427 107 12 1 1
## 1428 108 12 1 1
## 1429 109 12 1 1
## 1430 110 12 1 1
## 1431 111 12 1 1
## 1432 112 12 1 1
## 1433 113 12 1 1
## 1434 114 12 1 1
## 1435 115 12 1 1
## 1436 116 12 1 1
## 1437 117 12 1 1
## 1438 118 12 1 1
## 1439 119 12 1 1
## 1440 120 12 1 1
## 1441 1 13 1 1
## 1442 2 13 1 1
## 1443 3 13 1 1
## 1444 4 13 1 1
## 1445 5 13 1 1
## 1446 6 13 1 1
## 1447 7 13 1 1
## 1448 8 13 1 1
## 1449 9 13 1 1
## 1450 10 13 1 1
## 1451 11 13 1 1
## 1452 12 13 1 1
## 1453 13 13 1 1
## 1454 14 13 1 1
## 1455 15 13 1 1
## 1456 16 13 1 1
## 1457 17 13 1 1
## 1458 18 13 1 1
## 1459 19 13 1 1
## 1460 20 13 1 1
## 1461 21 13 1 1
## 1462 22 13 1 1
## 1463 23 13 1 1
## 1464 24 13 1 1
## 1465 25 13 1 1
## 1466 26 13 1 1
## 1467 27 13 1 1
## 1468 28 13 1 1
## 1469 29 13 1 1
## 1470 30 13 1 1
## 1471 31 13 1 1
## 1472 32 13 1 1
## 1473 33 13 1 1
## 1474 34 13 1 1
## 1475 35 13 1 1
## 1476 36 13 1 1
## 1477 37 13 1 1
## 1478 38 13 1 1
## 1479 39 13 1 1
## 1480 40 13 1 1
## 1481 41 13 1 1
## 1482 42 13 1 1
## 1483 43 13 1 1
## 1484 44 13 1 1
## 1485 45 13 1 1
## 1486 46 13 1 1
## 1487 47 13 1 1
## 1488 48 13 1 1
## 1489 49 13 1 1
## 1490 50 13 1 1
## 1491 51 13 1 1
## 1492 52 13 1 1
## 1493 53 13 1 1
## 1494 54 13 1 1
## 1495 55 13 1 1
## 1496 56 13 1 1
## 1497 57 13 1 1
## 1498 58 13 1 1
## 1499 59 13 1 1
## 1500 60 13 1 1
## 1501 61 13 1 1
## 1502 62 13 1 1
## 1503 63 13 1 1
## 1504 64 13 1 1
## 1505 65 13 1 1
## 1506 66 13 1 1
## 1507 67 13 1 1
## 1508 68 13 1 1
## 1509 69 13 1 1
## 1510 70 13 1 1
## 1511 71 13 1 1
## 1512 72 13 1 1
## 1513 73 13 1 1
## 1514 74 13 1 1
## 1515 75 13 1 1
## 1516 76 13 1 1
## 1517 77 13 1 1
## 1518 78 13 1 1
## 1519 79 13 1 1
## 1520 80 13 1 1
## 1521 81 13 1 1
## 1522 82 13 1 1
## 1523 83 13 1 1
## 1524 84 13 1 1
## 1525 85 13 1 1
## 1526 86 13 1 1
## 1527 87 13 1 1
## 1528 88 13 1 1
## 1529 89 13 1 1
## 1530 90 13 1 1
## 1531 91 13 1 1
## 1532 92 13 1 1
## 1533 93 13 1 1
## 1534 94 13 1 1
## 1535 95 13 1 1
## 1536 96 13 1 1
## 1537 97 13 1 1
## 1538 98 13 1 1
## 1539 99 13 1 1
## 1540 100 13 1 1
## 1541 101 13 1 1
## 1542 102 13 1 1
## 1543 103 13 1 1
## 1544 104 13 1 1
## 1545 105 13 1 1
## 1546 106 13 1 1
## 1547 107 13 1 1
## 1548 108 13 1 1
## 1549 109 13 1 1
## 1550 110 13 1 1
## 1551 111 13 1 1
## 1552 112 13 1 1
## 1553 113 13 1 1
## 1554 114 13 1 1
## 1555 115 13 1 1
## 1556 116 13 1 1
## 1557 117 13 1 1
## 1558 118 13 1 1
## 1559 119 13 1 1
## 1560 120 13 1 1
## 1561 1 14 1 1
## 1562 2 14 1 1
## 1563 3 14 1 1
## 1564 4 14 1 1
## 1565 5 14 1 1
## 1566 6 14 1 1
## 1567 7 14 1 1
## 1568 8 14 1 1
## 1569 9 14 1 1
## 1570 10 14 1 1
## 1571 11 14 1 1
## 1572 12 14 1 1
## 1573 13 14 1 1
## 1574 14 14 1 1
## 1575 15 14 1 1
## 1576 16 14 1 1
## 1577 17 14 1 1
## 1578 18 14 1 1
## 1579 19 14 1 1
## 1580 20 14 1 1
## 1581 21 14 1 1
## 1582 22 14 1 1
## 1583 23 14 1 1
## 1584 24 14 1 1
## 1585 25 14 1 1
## 1586 26 14 1 1
## 1587 27 14 1 1
## 1588 28 14 1 1
## 1589 29 14 1 1
## 1590 30 14 1 1
## 1591 31 14 1 1
## 1592 32 14 1 1
## 1593 33 14 1 1
## 1594 34 14 1 1
## 1595 35 14 1 1
## 1596 36 14 1 1
## 1597 37 14 1 1
## 1598 38 14 1 1
## 1599 39 14 1 1
## 1600 40 14 1 1
## 1601 41 14 1 1
## 1602 42 14 1 1
## 1603 43 14 1 1
## 1604 44 14 1 1
## 1605 45 14 1 1
## 1606 46 14 1 1
## 1607 47 14 1 1
## 1608 48 14 1 1
## 1609 49 14 1 1
## 1610 50 14 1 1
## 1611 51 14 1 1
## 1612 52 14 1 1
## 1613 53 14 1 1
## 1614 54 14 1 1
## 1615 55 14 1 1
## 1616 56 14 1 1
## 1617 57 14 1 1
## 1618 58 14 1 1
## 1619 59 14 1 1
## 1620 60 14 1 1
## 1621 61 14 1 1
## 1622 62 14 1 1
## 1623 63 14 1 1
## 1624 64 14 1 1
## 1625 65 14 1 1
## 1626 66 14 1 1
## 1627 67 14 1 1
## 1628 68 14 1 1
## 1629 69 14 1 1
## 1630 70 14 1 1
## 1631 71 14 1 1
## 1632 72 14 1 1
## 1633 73 14 1 1
## 1634 74 14 1 1
## 1635 75 14 1 1
## 1636 76 14 1 1
## 1637 77 14 1 1
## 1638 78 14 1 1
## 1639 79 14 1 1
## 1640 80 14 1 1
## 1641 81 14 1 1
## 1642 82 14 1 1
## 1643 83 14 1 1
## 1644 84 14 1 1
## 1645 85 14 1 1
## 1646 86 14 1 1
## 1647 87 14 1 1
## 1648 88 14 1 1
## 1649 89 14 1 1
## 1650 90 14 1 1
## 1651 91 14 1 1
## 1652 92 14 1 1
## 1653 93 14 1 1
## 1654 94 14 1 1
## 1655 95 14 1 1
## 1656 96 14 1 1
## 1657 97 14 1 1
## 1658 98 14 1 1
## 1659 99 14 1 1
## 1660 100 14 1 1
## 1661 101 14 1 1
## 1662 102 14 1 1
## 1663 103 14 1 1
## 1664 104 14 1 1
## 1665 105 14 1 1
## 1666 106 14 1 1
## 1667 107 14 1 1
## 1668 108 14 1 1
## 1669 109 14 1 1
## 1670 110 14 1 1
## 1671 111 14 1 1
## 1672 112 14 1 1
## 1673 113 14 1 1
## 1674 114 14 1 1
## 1675 115 14 1 1
## 1676 116 14 1 1
## 1677 117 14 1 1
## 1678 118 14 1 1
## 1679 119 14 1 1
## 1680 120 14 1 1
## 1681 1 15 1 1
## 1682 2 15 1 1
## 1683 3 15 1 1
## 1684 4 15 1 1
## 1685 5 15 1 1
## 1686 6 15 1 1
## 1687 7 15 1 1
## 1688 8 15 1 1
## 1689 9 15 1 1
## 1690 10 15 1 1
## 1691 11 15 1 1
## 1692 12 15 1 1
## 1693 13 15 1 1
## 1694 14 15 1 1
## 1695 15 15 1 1
## 1696 16 15 1 1
## 1697 17 15 1 1
## 1698 18 15 1 1
## 1699 19 15 1 1
## 1700 20 15 1 1
## 1701 21 15 1 1
## 1702 22 15 1 1
## 1703 23 15 1 1
## 1704 24 15 1 1
## 1705 25 15 1 1
## 1706 26 15 1 1
## 1707 27 15 1 1
## 1708 28 15 1 1
## 1709 29 15 1 1
## 1710 30 15 1 1
## 1711 31 15 1 1
## 1712 32 15 1 1
## 1713 33 15 1 1
## 1714 34 15 1 1
## 1715 35 15 1 1
## 1716 36 15 1 1
## 1717 37 15 1 1
## 1718 38 15 1 1
## 1719 39 15 1 1
## 1720 40 15 1 1
## 1721 41 15 1 1
## 1722 42 15 1 1
## 1723 43 15 1 1
## 1724 44 15 1 1
## 1725 45 15 1 1
## 1726 46 15 1 1
## 1727 47 15 1 1
## 1728 48 15 1 1
## 1729 49 15 1 1
## 1730 50 15 1 1
## 1731 51 15 1 1
## 1732 52 15 1 1
## 1733 53 15 1 1
## 1734 54 15 1 1
## 1735 55 15 1 1
## 1736 56 15 1 1
## 1737 57 15 1 1
## 1738 58 15 1 1
## 1739 59 15 1 1
## 1740 60 15 1 1
## 1741 61 15 1 1
## 1742 62 15 1 1
## 1743 63 15 1 1
## 1744 64 15 1 1
## 1745 65 15 1 1
## 1746 66 15 1 1
## 1747 67 15 1 1
## 1748 68 15 1 1
## 1749 69 15 1 1
## 1750 70 15 1 1
## 1751 71 15 1 1
## 1752 72 15 1 1
## 1753 73 15 1 1
## 1754 74 15 1 1
## 1755 75 15 1 1
## 1756 76 15 1 1
## 1757 77 15 1 1
## 1758 78 15 1 1
## 1759 79 15 1 1
## 1760 80 15 1 1
## 1761 81 15 1 1
## 1762 82 15 1 1
## 1763 83 15 1 1
## 1764 84 15 1 1
## 1765 85 15 1 1
## 1766 86 15 1 1
## 1767 87 15 1 1
## 1768 88 15 1 1
## 1769 89 15 1 1
## 1770 90 15 1 1
## 1771 91 15 1 1
## 1772 92 15 1 1
## 1773 93 15 1 1
## 1774 94 15 1 1
## 1775 95 15 1 1
## 1776 96 15 1 1
## 1777 97 15 1 1
## 1778 98 15 1 1
## 1779 99 15 1 1
## 1780 100 15 1 1
## 1781 101 15 1 1
## 1782 102 15 1 1
## 1783 103 15 1 1
## 1784 104 15 1 1
## 1785 105 15 1 1
## 1786 106 15 1 1
## 1787 107 15 1 1
## 1788 108 15 1 1
## 1789 109 15 1 1
## 1790 110 15 1 1
## 1791 111 15 1 1
## 1792 112 15 1 1
## 1793 113 15 1 1
## 1794 114 15 1 1
## 1795 115 15 1 1
## 1796 116 15 1 1
## 1797 117 15 1 1
## 1798 118 15 1 1
## 1799 119 15 1 1
## 1800 120 15 1 1
## 1801 1 16 1 1
## 1802 2 16 1 1
## 1803 3 16 1 1
## 1804 4 16 1 1
## 1805 5 16 1 1
## 1806 6 16 1 1
## 1807 7 16 1 1
## 1808 8 16 1 1
## 1809 9 16 1 1
## 1810 10 16 1 1
## 1811 11 16 1 1
## 1812 12 16 1 1
## 1813 13 16 1 1
## 1814 14 16 1 1
## 1815 15 16 1 1
## 1816 16 16 1 1
## 1817 17 16 1 1
## 1818 18 16 1 1
## 1819 19 16 1 1
## 1820 20 16 1 1
## 1821 21 16 1 1
## 1822 22 16 1 1
## 1823 23 16 1 1
## 1824 24 16 1 1
## 1825 25 16 1 1
## 1826 26 16 1 1
## 1827 27 16 1 1
## 1828 28 16 1 1
## 1829 29 16 1 1
## 1830 30 16 1 1
## 1831 31 16 1 1
## 1832 32 16 1 1
## 1833 33 16 1 1
## 1834 34 16 1 1
## 1835 35 16 1 1
## 1836 36 16 1 1
## 1837 37 16 1 1
## 1838 38 16 1 1
## 1839 39 16 1 1
## 1840 40 16 1 1
## 1841 41 16 1 1
## 1842 42 16 1 1
## 1843 43 16 1 1
## 1844 44 16 1 1
## 1845 45 16 1 1
## 1846 46 16 1 1
## 1847 47 16 1 1
## 1848 48 16 1 1
## 1849 49 16 1 1
## 1850 50 16 1 1
## 1851 51 16 1 1
## 1852 52 16 1 1
## 1853 53 16 1 1
## 1854 54 16 1 1
## 1855 55 16 1 1
## 1856 56 16 1 1
## 1857 57 16 1 1
## 1858 58 16 1 1
## 1859 59 16 1 1
## 1860 60 16 1 1
## 1861 61 16 1 1
## 1862 62 16 1 1
## 1863 63 16 1 1
## 1864 64 16 1 1
## 1865 65 16 1 1
## 1866 66 16 1 1
## 1867 67 16 1 1
## 1868 68 16 1 1
## 1869 69 16 1 1
## 1870 70 16 1 1
## 1871 71 16 1 1
## 1872 72 16 1 1
## 1873 73 16 1 1
## 1874 74 16 1 1
## 1875 75 16 1 1
## 1876 76 16 1 1
## 1877 77 16 1 1
## 1878 78 16 1 1
## 1879 79 16 1 1
## 1880 80 16 1 1
## 1881 81 16 1 1
## 1882 82 16 1 1
## 1883 83 16 1 1
## 1884 84 16 1 1
## 1885 85 16 1 1
## 1886 86 16 1 1
## 1887 87 16 1 1
## 1888 88 16 1 1
## 1889 89 16 1 1
## 1890 90 16 1 1
## 1891 91 16 1 1
## 1892 92 16 1 1
## 1893 93 16 1 1
## 1894 94 16 1 1
## 1895 95 16 1 1
## 1896 96 16 1 1
## 1897 97 16 1 1
## 1898 98 16 1 1
## 1899 99 16 1 1
## 1900 100 16 1 1
## 1901 101 16 1 1
## 1902 102 16 1 1
## 1903 103 16 1 1
## 1904 104 16 1 1
## 1905 105 16 1 1
## 1906 106 16 1 1
## 1907 107 16 1 1
## 1908 108 16 1 1
## 1909 109 16 1 1
## 1910 110 16 1 1
## 1911 111 16 1 1
## 1912 112 16 1 1
## 1913 113 16 1 1
## 1914 114 16 1 1
## 1915 115 16 1 1
## 1916 116 16 1 1
## 1917 117 16 1 1
## 1918 118 16 1 1
## 1919 119 16 1 1
## 1920 120 16 1 1
## 1921 1 17 1 1
## 1922 2 17 1 1
## 1923 3 17 1 1
## 1924 4 17 1 1
## 1925 5 17 1 1
## 1926 6 17 1 1
## 1927 7 17 1 1
## 1928 8 17 1 1
## 1929 9 17 1 1
## 1930 10 17 1 1
## 1931 11 17 1 1
## 1932 12 17 1 1
## 1933 13 17 1 1
## 1934 14 17 1 1
## 1935 15 17 1 1
## 1936 16 17 1 1
## 1937 17 17 1 1
## 1938 18 17 1 1
## 1939 19 17 1 1
## 1940 20 17 1 1
## 1941 21 17 1 1
## 1942 22 17 1 1
## 1943 23 17 1 1
## 1944 24 17 1 1
## 1945 25 17 1 1
## 1946 26 17 1 1
## 1947 27 17 1 1
## 1948 28 17 1 1
## 1949 29 17 1 1
## 1950 30 17 1 1
## 1951 31 17 1 1
## 1952 32 17 1 1
## 1953 33 17 1 1
## 1954 34 17 1 1
## 1955 35 17 1 1
## 1956 36 17 1 1
## 1957 37 17 1 1
## 1958 38 17 1 1
## 1959 39 17 1 1
## 1960 40 17 1 1
## 1961 41 17 1 1
## 1962 42 17 1 1
## 1963 43 17 1 1
## 1964 44 17 1 1
## 1965 45 17 1 1
## 1966 46 17 1 1
## 1967 47 17 1 1
## 1968 48 17 1 1
## 1969 49 17 1 1
## 1970 50 17 1 1
## 1971 51 17 1 1
## 1972 52 17 1 1
## 1973 53 17 1 1
## 1974 54 17 1 1
## 1975 55 17 1 1
## 1976 56 17 1 1
## 1977 57 17 1 1
## 1978 58 17 1 1
## 1979 59 17 1 1
## 1980 60 17 1 1
## 1981 61 17 1 1
## 1982 62 17 1 1
## 1983 63 17 1 1
## 1984 64 17 1 1
## 1985 65 17 1 1
## 1986 66 17 1 1
## 1987 67 17 1 1
## 1988 68 17 1 1
## 1989 69 17 1 1
## 1990 70 17 1 1
## 1991 71 17 1 1
## 1992 72 17 1 1
## 1993 73 17 1 1
## 1994 74 17 1 1
## 1995 75 17 1 1
## 1996 76 17 1 1
## 1997 77 17 1 1
## 1998 78 17 1 1
## 1999 79 17 1 1
## 2000 80 17 1 1
## 2001 81 17 1 1
## 2002 82 17 1 1
## 2003 83 17 1 1
## 2004 84 17 1 1
## 2005 85 17 1 1
## 2006 86 17 1 1
## 2007 87 17 1 1
## 2008 88 17 1 1
## 2009 89 17 1 1
## 2010 90 17 1 1
## 2011 91 17 1 1
## 2012 92 17 1 1
## 2013 93 17 1 1
## 2014 94 17 1 1
## 2015 95 17 1 1
## 2016 96 17 1 1
## 2017 97 17 1 1
## 2018 98 17 1 1
## 2019 99 17 1 1
## 2020 100 17 1 1
## 2021 101 17 1 1
## 2022 102 17 1 1
## 2023 103 17 1 1
## 2024 104 17 1 1
## 2025 105 17 1 1
## 2026 106 17 1 1
## 2027 107 17 1 1
## 2028 108 17 1 1
## 2029 109 17 1 1
## 2030 110 17 1 1
## 2031 111 17 1 1
## 2032 112 17 1 1
## 2033 113 17 1 1
## 2034 114 17 1 1
## 2035 115 17 1 1
## 2036 116 17 1 1
## 2037 117 17 1 1
## 2038 118 17 1 1
## 2039 119 17 1 1
## 2040 120 17 1 1
## 2041 1 18 1 1
## 2042 2 18 1 1
## 2043 3 18 1 1
## 2044 4 18 1 1
## 2045 5 18 1 1
## 2046 6 18 1 1
## 2047 7 18 1 1
## 2048 8 18 1 1
## 2049 9 18 1 1
## 2050 10 18 1 1
## 2051 11 18 1 1
## 2052 12 18 1 1
## 2053 13 18 1 1
## 2054 14 18 1 1
## 2055 15 18 1 1
## 2056 16 18 1 1
## 2057 17 18 1 1
## 2058 18 18 1 1
## 2059 19 18 1 1
## 2060 20 18 1 1
## 2061 21 18 1 1
## 2062 22 18 1 1
## 2063 23 18 1 1
## 2064 24 18 1 1
## 2065 25 18 1 1
## 2066 26 18 1 1
## 2067 27 18 1 1
## 2068 28 18 1 1
## 2069 29 18 1 1
## 2070 30 18 1 1
## 2071 31 18 1 1
## 2072 32 18 1 1
## 2073 33 18 1 1
## 2074 34 18 1 1
## 2075 35 18 1 1
## 2076 36 18 1 1
## 2077 37 18 1 1
## 2078 38 18 1 1
## 2079 39 18 1 1
## 2080 40 18 1 1
## 2081 41 18 1 1
## 2082 42 18 1 1
## 2083 43 18 1 1
## 2084 44 18 1 1
## 2085 45 18 1 1
## 2086 46 18 1 1
## 2087 47 18 1 1
## 2088 48 18 1 1
## 2089 49 18 1 1
## 2090 50 18 1 1
## 2091 51 18 1 1
## 2092 52 18 1 1
## 2093 53 18 1 1
## 2094 54 18 1 1
## 2095 55 18 1 1
## 2096 56 18 1 1
## 2097 57 18 1 1
## 2098 58 18 1 1
## 2099 59 18 1 1
## 2100 60 18 1 1
## 2101 61 18 1 1
## 2102 62 18 1 1
## 2103 63 18 1 1
## 2104 64 18 1 1
## 2105 65 18 1 1
## 2106 66 18 1 1
## 2107 67 18 1 1
## 2108 68 18 1 1
## 2109 69 18 1 1
## 2110 70 18 1 1
## 2111 71 18 1 1
## 2112 72 18 1 1
## 2113 73 18 1 1
## 2114 74 18 1 1
## 2115 75 18 1 1
## 2116 76 18 1 1
## 2117 77 18 1 1
## 2118 78 18 1 1
## 2119 79 18 1 1
## 2120 80 18 1 1
## 2121 81 18 1 1
## 2122 82 18 1 1
## 2123 83 18 1 1
## 2124 84 18 1 1
## 2125 85 18 1 1
## 2126 86 18 1 1
## 2127 87 18 1 1
## 2128 88 18 1 1
## 2129 89 18 1 1
## 2130 90 18 1 1
## 2131 91 18 1 1
## 2132 92 18 1 1
## 2133 93 18 1 1
## 2134 94 18 1 1
## 2135 95 18 1 1
## 2136 96 18 1 1
## 2137 97 18 1 1
## 2138 98 18 1 1
## 2139 99 18 1 1
## 2140 100 18 1 1
## 2141 101 18 1 1
## 2142 102 18 1 1
## 2143 103 18 1 1
## 2144 104 18 1 1
## 2145 105 18 1 1
## 2146 106 18 1 1
## 2147 107 18 1 1
## 2148 108 18 1 1
## 2149 109 18 1 1
## 2150 110 18 1 1
## 2151 111 18 1 1
## 2152 112 18 1 1
## 2153 113 18 1 1
## 2154 114 18 1 1
## 2155 115 18 1 1
## 2156 116 18 1 1
## 2157 117 18 1 1
## 2158 118 18 1 1
## 2159 119 18 1 1
## 2160 120 18 1 1
## 2161 1 19 1 1
## 2162 2 19 1 1
## 2163 3 19 1 1
## 2164 4 19 1 1
## 2165 5 19 1 1
## 2166 6 19 1 1
## 2167 7 19 1 1
## 2168 8 19 1 1
## 2169 9 19 1 1
## 2170 10 19 1 1
## 2171 11 19 1 1
## 2172 12 19 1 1
## 2173 13 19 1 1
## 2174 14 19 1 1
## 2175 15 19 1 1
## 2176 16 19 1 1
## 2177 17 19 1 1
## 2178 18 19 1 1
## 2179 19 19 1 1
## 2180 20 19 1 1
## 2181 21 19 1 1
## 2182 22 19 1 1
## 2183 23 19 1 1
## 2184 24 19 1 1
## 2185 25 19 1 1
## 2186 26 19 1 1
## 2187 27 19 1 1
## 2188 28 19 1 1
## 2189 29 19 1 1
## 2190 30 19 1 1
## 2191 31 19 1 1
## 2192 32 19 1 1
## 2193 33 19 1 1
## 2194 34 19 1 1
## 2195 35 19 1 1
## 2196 36 19 1 1
## 2197 37 19 1 1
## 2198 38 19 1 1
## 2199 39 19 1 1
## 2200 40 19 1 1
## 2201 41 19 1 1
## 2202 42 19 1 1
## 2203 43 19 1 1
## 2204 44 19 1 1
## 2205 45 19 1 1
## 2206 46 19 1 1
## 2207 47 19 1 1
## 2208 48 19 1 1
## 2209 49 19 1 1
## 2210 50 19 1 1
## 2211 51 19 1 1
## 2212 52 19 1 1
## 2213 53 19 1 1
## 2214 54 19 1 1
## 2215 55 19 1 1
## 2216 56 19 1 1
## 2217 57 19 1 1
## 2218 58 19 1 1
## 2219 59 19 1 1
## 2220 60 19 1 1
## 2221 61 19 1 1
## 2222 62 19 1 1
## 2223 63 19 1 1
## 2224 64 19 1 1
## 2225 65 19 1 1
## 2226 66 19 1 1
## 2227 67 19 1 1
## 2228 68 19 1 1
## 2229 69 19 1 1
## 2230 70 19 1 1
## 2231 71 19 1 1
## 2232 72 19 1 1
## 2233 73 19 1 1
## 2234 74 19 1 1
## 2235 75 19 1 1
## 2236 76 19 1 1
## 2237 77 19 1 1
## 2238 78 19 1 1
## 2239 79 19 1 1
## 2240 80 19 1 1
## 2241 81 19 1 1
## 2242 82 19 1 1
## 2243 83 19 1 1
## 2244 84 19 1 1
## 2245 85 19 1 1
## 2246 86 19 1 1
## 2247 87 19 1 1
## 2248 88 19 1 1
## 2249 89 19 1 1
## 2250 90 19 1 1
## 2251 91 19 1 1
## 2252 92 19 1 1
## 2253 93 19 1 1
## 2254 94 19 1 1
## 2255 95 19 1 1
## 2256 96 19 1 1
## 2257 97 19 1 1
## 2258 98 19 1 1
## 2259 99 19 1 1
## 2260 100 19 1 1
## 2261 101 19 1 1
## 2262 102 19 1 1
## 2263 103 19 1 1
## 2264 104 19 1 1
## 2265 105 19 1 1
## 2266 106 19 1 1
## 2267 107 19 1 1
## 2268 108 19 1 1
## 2269 109 19 1 1
## 2270 110 19 1 1
## 2271 111 19 1 1
## 2272 112 19 1 1
## 2273 113 19 1 1
## 2274 114 19 1 1
## 2275 115 19 1 1
## 2276 116 19 1 1
## 2277 117 19 1 1
## 2278 118 19 1 1
## 2279 119 19 1 1
## 2280 120 19 1 1
## 2281 1 20 1 1
## 2282 2 20 1 1
## 2283 3 20 1 1
## 2284 4 20 1 1
## 2285 5 20 1 1
## 2286 6 20 1 1
## 2287 7 20 1 1
## 2288 8 20 1 1
## 2289 9 20 1 1
## 2290 10 20 1 1
## 2291 11 20 1 1
## 2292 12 20 1 1
## 2293 13 20 1 1
## 2294 14 20 1 1
## 2295 15 20 1 1
## 2296 16 20 1 1
## 2297 17 20 1 1
## 2298 18 20 1 1
## 2299 19 20 1 1
## 2300 20 20 1 1
## 2301 21 20 1 1
## 2302 22 20 1 1
## 2303 23 20 1 1
## 2304 24 20 1 1
## 2305 25 20 1 1
## 2306 26 20 1 1
## 2307 27 20 1 1
## 2308 28 20 1 1
## 2309 29 20 1 1
## 2310 30 20 1 1
## 2311 31 20 1 1
## 2312 32 20 1 1
## 2313 33 20 1 1
## 2314 34 20 1 1
## 2315 35 20 1 1
## 2316 36 20 1 1
## 2317 37 20 1 1
## 2318 38 20 1 1
## 2319 39 20 1 1
## 2320 40 20 1 1
## 2321 41 20 1 1
## 2322 42 20 1 1
## 2323 43 20 1 1
## 2324 44 20 1 1
## 2325 45 20 1 1
## 2326 46 20 1 1
## 2327 47 20 1 1
## 2328 48 20 1 1
## 2329 49 20 1 1
## 2330 50 20 1 1
## 2331 51 20 1 1
## 2332 52 20 1 1
## 2333 53 20 1 1
## 2334 54 20 1 1
## 2335 55 20 1 1
## 2336 56 20 1 1
## 2337 57 20 1 1
## 2338 58 20 1 1
## 2339 59 20 1 1
## 2340 60 20 1 1
## 2341 61 20 1 1
## 2342 62 20 1 1
## 2343 63 20 1 1
## 2344 64 20 1 1
## 2345 65 20 1 1
## 2346 66 20 1 1
## 2347 67 20 1 1
## 2348 68 20 1 1
## 2349 69 20 1 1
## 2350 70 20 1 1
## 2351 71 20 1 1
## 2352 72 20 1 1
## 2353 73 20 1 1
## 2354 74 20 1 1
## 2355 75 20 1 1
## 2356 76 20 1 1
## 2357 77 20 1 1
## 2358 78 20 1 1
## 2359 79 20 1 1
## 2360 80 20 1 1
## 2361 81 20 1 1
## 2362 82 20 1 1
## 2363 83 20 1 1
## 2364 84 20 1 1
## 2365 85 20 1 1
## 2366 86 20 1 1
## 2367 87 20 1 1
## 2368 88 20 1 1
## 2369 89 20 1 1
## 2370 90 20 1 1
## 2371 91 20 1 1
## 2372 92 20 1 1
## 2373 93 20 1 1
## 2374 94 20 1 1
## 2375 95 20 1 1
## 2376 96 20 1 1
## 2377 97 20 1 1
## 2378 98 20 1 1
## 2379 99 20 1 1
## 2380 100 20 1 1
## 2381 101 20 1 1
## 2382 102 20 1 1
## 2383 103 20 1 1
## 2384 104 20 1 1
## 2385 105 20 1 1
## 2386 106 20 1 1
## 2387 107 20 1 1
## 2388 108 20 1 1
## 2389 109 20 1 1
## 2390 110 20 1 1
## 2391 111 20 1 1
## 2392 112 20 1 1
## 2393 113 20 1 1
## 2394 114 20 1 1
## 2395 115 20 1 1
## 2396 116 20 1 1
## 2397 117 20 1 1
## 2398 118 20 1 1
## 2399 119 20 1 1
## 2400 120 20 1 1
## 2401 1 21 1 1
## 2402 2 21 1 1
## 2403 3 21 1 1
## 2404 4 21 1 1
## 2405 5 21 1 1
## 2406 6 21 1 1
## 2407 7 21 1 1
## 2408 8 21 1 1
## 2409 9 21 1 1
## 2410 10 21 1 1
## 2411 11 21 1 1
## 2412 12 21 1 1
## 2413 13 21 1 1
## 2414 14 21 1 1
## 2415 15 21 1 1
## 2416 16 21 1 1
## 2417 17 21 1 1
## 2418 18 21 1 1
## 2419 19 21 1 1
## 2420 20 21 1 1
## 2421 21 21 1 1
## 2422 22 21 1 1
## 2423 23 21 1 1
## 2424 24 21 1 1
## 2425 25 21 1 1
## 2426 26 21 1 1
## 2427 27 21 1 1
## 2428 28 21 1 1
## 2429 29 21 1 1
## 2430 30 21 1 1
## 2431 31 21 1 1
## 2432 32 21 1 1
## 2433 33 21 1 1
## 2434 34 21 1 1
## 2435 35 21 1 1
## 2436 36 21 1 1
## 2437 37 21 1 1
## 2438 38 21 1 1
## 2439 39 21 1 1
## 2440 40 21 1 1
## 2441 41 21 1 1
## 2442 42 21 1 1
## 2443 43 21 1 1
## 2444 44 21 1 1
## 2445 45 21 1 1
## 2446 46 21 1 1
## 2447 47 21 1 1
## 2448 48 21 1 1
## 2449 49 21 1 1
## 2450 50 21 1 1
## 2451 51 21 1 1
## 2452 52 21 1 1
## 2453 53 21 1 1
## 2454 54 21 1 1
## 2455 55 21 1 1
## 2456 56 21 1 1
## 2457 57 21 1 1
## 2458 58 21 1 1
## 2459 59 21 1 1
## 2460 60 21 1 1
## 2461 61 21 1 1
## 2462 62 21 1 1
## 2463 63 21 1 1
## 2464 64 21 1 1
## 2465 65 21 1 1
## 2466 66 21 1 1
## 2467 67 21 1 1
## 2468 68 21 1 1
## 2469 69 21 1 1
## 2470 70 21 1 1
## 2471 71 21 1 1
## 2472 72 21 1 1
## 2473 73 21 1 1
## 2474 74 21 1 1
## 2475 75 21 1 1
## 2476 76 21 1 1
## 2477 77 21 1 1
## 2478 78 21 1 1
## 2479 79 21 1 1
## 2480 80 21 1 1
## 2481 81 21 1 1
## 2482 82 21 1 1
## 2483 83 21 1 1
## 2484 84 21 1 1
## 2485 85 21 1 1
## 2486 86 21 1 1
## 2487 87 21 1 1
## 2488 88 21 1 1
## 2489 89 21 1 1
## 2490 90 21 1 1
## 2491 91 21 1 1
## 2492 92 21 1 1
## 2493 93 21 1 1
## 2494 94 21 1 1
## 2495 95 21 1 1
## 2496 96 21 1 1
## 2497 97 21 1 1
## 2498 98 21 1 1
## 2499 99 21 1 1
## 2500 100 21 1 1
## 2501 101 21 1 1
## 2502 102 21 1 1
## 2503 103 21 1 1
## 2504 104 21 1 1
## 2505 105 21 1 1
## 2506 106 21 1 1
## 2507 107 21 1 1
## 2508 108 21 1 1
## 2509 109 21 1 1
## 2510 110 21 1 1
## 2511 111 21 1 1
## 2512 112 21 1 1
## 2513 113 21 1 1
## 2514 114 21 1 1
## 2515 115 21 1 1
## 2516 116 21 1 1
## 2517 117 21 1 1
## 2518 118 21 1 1
## 2519 119 21 1 1
## 2520 120 21 1 1
## 2521 1 22 1 1
## 2522 2 22 1 1
## 2523 3 22 1 1
## 2524 4 22 1 1
## 2525 5 22 1 1
## 2526 6 22 1 1
## 2527 7 22 1 1
## 2528 8 22 1 1
## 2529 9 22 1 1
## 2530 10 22 1 1
## 2531 11 22 1 1
## 2532 12 22 1 1
## 2533 13 22 1 1
## 2534 14 22 1 1
## 2535 15 22 1 1
## 2536 16 22 1 1
## 2537 17 22 1 1
## 2538 18 22 1 1
## 2539 19 22 1 1
## 2540 20 22 1 1
## 2541 21 22 1 1
## 2542 22 22 1 1
## 2543 23 22 1 1
## 2544 24 22 1 1
## 2545 25 22 1 1
## 2546 26 22 1 1
## 2547 27 22 1 1
## 2548 28 22 1 1
## 2549 29 22 1 1
## 2550 30 22 1 1
## 2551 31 22 1 1
## 2552 32 22 1 1
## 2553 33 22 1 1
## 2554 34 22 1 1
## 2555 35 22 1 1
## 2556 36 22 1 1
## 2557 37 22 1 1
## 2558 38 22 1 1
## 2559 39 22 1 1
## 2560 40 22 1 1
## 2561 41 22 1 1
## 2562 42 22 1 1
## 2563 43 22 1 1
## 2564 44 22 1 1
## 2565 45 22 1 1
## 2566 46 22 1 1
## 2567 47 22 1 1
## 2568 48 22 1 1
## 2569 49 22 1 1
## 2570 50 22 1 1
## 2571 51 22 1 1
## 2572 52 22 1 1
## 2573 53 22 1 1
## 2574 54 22 1 1
## 2575 55 22 1 1
## 2576 56 22 1 1
## 2577 57 22 1 1
## 2578 58 22 1 1
## 2579 59 22 1 1
## 2580 60 22 1 1
## 2581 61 22 1 1
## 2582 62 22 1 1
## 2583 63 22 1 1
## 2584 64 22 1 1
## 2585 65 22 1 1
## 2586 66 22 1 1
## 2587 67 22 1 1
## 2588 68 22 1 1
## 2589 69 22 1 1
## 2590 70 22 1 1
## 2591 71 22 1 1
## 2592 72 22 1 1
## 2593 73 22 1 1
## 2594 74 22 1 1
## 2595 75 22 1 1
## 2596 76 22 1 1
## 2597 77 22 1 1
## 2598 78 22 1 1
## 2599 79 22 1 1
## 2600 80 22 1 1
## 2601 81 22 1 1
## 2602 82 22 1 1
## 2603 83 22 1 1
## 2604 84 22 1 1
## 2605 85 22 1 1
## 2606 86 22 1 1
## 2607 87 22 1 1
## 2608 88 22 1 1
## 2609 89 22 1 1
## 2610 90 22 1 1
## 2611 91 22 1 1
## 2612 92 22 1 1
## 2613 93 22 1 1
## 2614 94 22 1 1
## 2615 95 22 1 1
## 2616 96 22 1 1
## 2617 97 22 1 1
## 2618 98 22 1 1
## 2619 99 22 1 1
## 2620 100 22 1 1
## 2621 101 22 1 1
## 2622 102 22 1 1
## 2623 103 22 1 1
## 2624 104 22 1 1
## 2625 105 22 1 1
## 2626 106 22 1 1
## 2627 107 22 1 1
## 2628 108 22 1 1
## 2629 109 22 1 1
## 2630 110 22 1 1
## 2631 111 22 1 1
## 2632 112 22 1 1
## 2633 113 22 1 1
## 2634 114 22 1 1
## 2635 115 22 1 1
## 2636 116 22 1 1
## 2637 117 22 1 1
## 2638 118 22 1 1
## 2639 119 22 1 1
## 2640 120 22 1 1
## 2641 1 23 1 1
## 2642 2 23 1 1
## 2643 3 23 1 1
## 2644 4 23 1 1
## 2645 5 23 1 1
## 2646 6 23 1 1
## 2647 7 23 1 1
## 2648 8 23 1 1
## 2649 9 23 1 1
## 2650 10 23 1 1
## 2651 11 23 1 1
## 2652 12 23 1 1
## 2653 13 23 1 1
## 2654 14 23 1 1
## 2655 15 23 1 1
## 2656 16 23 1 1
## 2657 17 23 1 1
## 2658 18 23 1 1
## 2659 19 23 1 1
## 2660 20 23 1 1
## 2661 21 23 1 1
## 2662 22 23 1 1
## 2663 23 23 1 1
## 2664 24 23 1 1
## 2665 25 23 1 1
## 2666 26 23 1 1
## 2667 27 23 1 1
## 2668 28 23 1 1
## 2669 29 23 1 1
## 2670 30 23 1 1
## 2671 31 23 1 1
## 2672 32 23 1 1
## 2673 33 23 1 1
## 2674 34 23 1 1
## 2675 35 23 1 1
## 2676 36 23 1 1
## 2677 37 23 1 1
## 2678 38 23 1 1
## 2679 39 23 1 1
## 2680 40 23 1 1
## 2681 41 23 1 1
## 2682 42 23 1 1
## 2683 43 23 1 1
## 2684 44 23 1 1
## 2685 45 23 1 1
## 2686 46 23 1 1
## 2687 47 23 1 1
## 2688 48 23 1 1
## 2689 49 23 1 1
## 2690 50 23 1 1
## 2691 51 23 1 1
## 2692 52 23 1 1
## 2693 53 23 1 1
## 2694 54 23 1 1
## 2695 55 23 1 1
## 2696 56 23 1 1
## 2697 57 23 1 1
## 2698 58 23 1 1
## 2699 59 23 1 1
## 2700 60 23 1 1
## 2701 61 23 1 1
## 2702 62 23 1 1
## 2703 63 23 1 1
## 2704 64 23 1 1
## 2705 65 23 1 1
## 2706 66 23 1 1
## 2707 67 23 1 1
## 2708 68 23 1 1
## 2709 69 23 1 1
## 2710 70 23 1 1
## 2711 71 23 1 1
## 2712 72 23 1 1
## 2713 73 23 1 1
## 2714 74 23 1 1
## 2715 75 23 1 1
## 2716 76 23 1 1
## 2717 77 23 1 1
## 2718 78 23 1 1
## 2719 79 23 1 1
## 2720 80 23 1 1
## 2721 81 23 1 1
## 2722 82 23 1 1
## 2723 83 23 1 1
## 2724 84 23 1 1
## 2725 85 23 1 1
## 2726 86 23 1 1
## 2727 87 23 1 1
## 2728 88 23 1 1
## 2729 89 23 1 1
## 2730 90 23 1 1
## 2731 91 23 1 1
## 2732 92 23 1 1
## 2733 93 23 1 1
## 2734 94 23 1 1
## 2735 95 23 1 1
## 2736 96 23 1 1
## 2737 97 23 1 1
## 2738 98 23 1 1
## 2739 99 23 1 1
## 2740 100 23 1 1
## 2741 101 23 1 1
## 2742 102 23 1 1
## 2743 103 23 1 1
## 2744 104 23 1 1
## 2745 105 23 1 1
## 2746 106 23 1 1
## 2747 107 23 1 1
## 2748 108 23 1 1
## 2749 109 23 1 1
## 2750 110 23 1 1
## 2751 111 23 1 1
## 2752 112 23 1 1
## 2753 113 23 1 1
## 2754 114 23 1 1
## 2755 115 23 1 1
## 2756 116 23 1 1
## 2757 117 23 1 1
## 2758 118 23 1 1
## 2759 119 23 1 1
## 2760 120 23 1 1
## 2761 1 24 1 1
## 2762 2 24 1 1
## 2763 3 24 1 1
## 2764 4 24 1 1
## 2765 5 24 1 1
## 2766 6 24 1 1
## 2767 7 24 1 1
## 2768 8 24 1 1
## 2769 9 24 1 1
## 2770 10 24 1 1
## 2771 11 24 1 1
## 2772 12 24 1 1
## 2773 13 24 1 1
## 2774 14 24 1 1
## 2775 15 24 1 1
## 2776 16 24 1 1
## 2777 17 24 1 1
## 2778 18 24 1 1
## 2779 19 24 1 1
## 2780 20 24 1 1
## 2781 21 24 1 1
## 2782 22 24 1 1
## 2783 23 24 1 1
## 2784 24 24 1 1
## 2785 25 24 1 1
## 2786 26 24 1 1
## 2787 27 24 1 1
## 2788 28 24 1 1
## 2789 29 24 1 1
## 2790 30 24 1 1
## 2791 31 24 1 1
## 2792 32 24 1 1
## 2793 33 24 1 1
## 2794 34 24 1 1
## 2795 35 24 1 1
## 2796 36 24 1 1
## 2797 37 24 1 1
## 2798 38 24 1 1
## 2799 39 24 1 1
## 2800 40 24 1 1
## 2801 41 24 1 1
## 2802 42 24 1 1
## 2803 43 24 1 1
## 2804 44 24 1 1
## 2805 45 24 1 1
## 2806 46 24 1 1
## 2807 47 24 1 1
## 2808 48 24 1 1
## 2809 49 24 1 1
## 2810 50 24 1 1
## 2811 51 24 1 1
## 2812 52 24 1 1
## 2813 53 24 1 1
## 2814 54 24 1 1
## 2815 55 24 1 1
## 2816 56 24 1 1
## 2817 57 24 1 1
## 2818 58 24 1 1
## 2819 59 24 1 1
## 2820 60 24 1 1
## 2821 61 24 1 1
## 2822 62 24 1 1
## 2823 63 24 1 1
## 2824 64 24 1 1
## 2825 65 24 1 1
## 2826 66 24 1 1
## 2827 67 24 1 1
## 2828 68 24 1 1
## 2829 69 24 1 1
## 2830 70 24 1 1
## 2831 71 24 1 1
## 2832 72 24 1 1
## 2833 73 24 1 1
## 2834 74 24 1 1
## 2835 75 24 1 1
## 2836 76 24 1 1
## 2837 77 24 1 1
## 2838 78 24 1 1
## 2839 79 24 1 1
## 2840 80 24 1 1
## 2841 81 24 1 1
## 2842 82 24 1 1
## 2843 83 24 1 1
## 2844 84 24 1 1
## 2845 85 24 1 1
## 2846 86 24 1 1
## 2847 87 24 1 1
## 2848 88 24 1 1
## 2849 89 24 1 1
## 2850 90 24 1 1
## 2851 91 24 1 1
## 2852 92 24 1 1
## 2853 93 24 1 1
## 2854 94 24 1 1
## 2855 95 24 1 1
## 2856 96 24 1 1
## 2857 97 24 1 1
## 2858 98 24 1 1
## 2859 99 24 1 1
## 2860 100 24 1 1
## 2861 101 24 1 1
## 2862 102 24 1 1
## 2863 103 24 1 1
## 2864 104 24 1 1
## 2865 105 24 1 1
## 2866 106 24 1 1
## 2867 107 24 1 1
## 2868 108 24 1 1
## 2869 109 24 1 1
## 2870 110 24 1 1
## 2871 111 24 1 1
## 2872 112 24 1 1
## 2873 113 24 1 1
## 2874 114 24 1 1
## 2875 115 24 1 1
## 2876 116 24 1 1
## 2877 117 24 1 1
## 2878 118 24 1 1
## 2879 119 24 1 1
## 2880 120 24 1 1
## 2881 1 25 1 1
## 2882 2 25 1 1
## 2883 3 25 1 1
## 2884 4 25 1 1
## 2885 5 25 1 1
## 2886 6 25 1 1
## 2887 7 25 1 1
## 2888 8 25 1 1
## 2889 9 25 1 1
## 2890 10 25 1 1
## 2891 11 25 1 1
## 2892 12 25 1 1
## 2893 13 25 1 1
## 2894 14 25 1 1
## 2895 15 25 1 1
## 2896 16 25 1 1
## 2897 17 25 1 1
## 2898 18 25 1 1
## 2899 19 25 1 1
## 2900 20 25 1 1
## 2901 21 25 1 1
## 2902 22 25 1 1
## 2903 23 25 1 1
## 2904 24 25 1 1
## 2905 25 25 1 1
## 2906 26 25 1 1
## 2907 27 25 1 1
## 2908 28 25 1 1
## 2909 29 25 1 1
## 2910 30 25 1 1
## 2911 31 25 1 1
## 2912 32 25 1 1
## 2913 33 25 1 1
## 2914 34 25 1 1
## 2915 35 25 1 1
## 2916 36 25 1 1
## 2917 37 25 1 1
## 2918 38 25 1 1
## 2919 39 25 1 1
## 2920 40 25 1 1
## 2921 41 25 1 1
## 2922 42 25 1 1
## 2923 43 25 1 1
## 2924 44 25 1 1
## 2925 45 25 1 1
## 2926 46 25 1 1
## 2927 47 25 1 1
## 2928 48 25 1 1
## 2929 49 25 1 1
## 2930 50 25 1 1
## 2931 51 25 1 1
## 2932 52 25 1 1
## 2933 53 25 1 1
## 2934 54 25 1 1
## 2935 55 25 1 1
## 2936 56 25 1 1
## 2937 57 25 1 1
## 2938 58 25 1 1
## 2939 59 25 1 1
## 2940 60 25 1 1
## 2941 61 25 1 1
## 2942 62 25 1 1
## 2943 63 25 1 1
## 2944 64 25 1 1
## 2945 65 25 1 1
## 2946 66 25 1 1
## 2947 67 25 1 1
## 2948 68 25 1 1
## 2949 69 25 1 1
## 2950 70 25 1 1
## 2951 71 25 1 1
## 2952 72 25 1 1
## 2953 73 25 1 1
## 2954 74 25 1 1
## 2955 75 25 1 1
## 2956 76 25 1 1
## 2957 77 25 1 1
## 2958 78 25 1 1
## 2959 79 25 1 1
## 2960 80 25 1 1
## 2961 81 25 1 1
## 2962 82 25 1 1
## 2963 83 25 1 1
## 2964 84 25 1 1
## 2965 85 25 1 1
## 2966 86 25 1 1
## 2967 87 25 1 1
## 2968 88 25 1 1
## 2969 89 25 1 1
## 2970 90 25 1 1
## 2971 91 25 1 1
## 2972 92 25 1 1
## 2973 93 25 1 1
## 2974 94 25 1 1
## 2975 95 25 1 1
## 2976 96 25 1 1
## 2977 97 25 1 1
## 2978 98 25 1 1
## 2979 99 25 1 1
## 2980 100 25 1 1
## 2981 101 25 1 1
## 2982 102 25 1 1
## 2983 103 25 1 1
## 2984 104 25 1 1
## 2985 105 25 1 1
## 2986 106 25 1 1
## 2987 107 25 1 1
## 2988 108 25 1 1
## 2989 109 25 1 1
## 2990 110 25 1 1
## 2991 111 25 1 1
## 2992 112 25 1 1
## 2993 113 25 1 1
## 2994 114 25 1 1
## 2995 115 25 1 1
## 2996 116 25 1 1
## 2997 117 25 1 1
## 2998 118 25 1 1
## 2999 119 25 1 1
## 3000 120 25 1 1
## 3001 1 26 1 1
## 3002 2 26 1 1
## 3003 3 26 1 1
## 3004 4 26 1 1
## 3005 5 26 1 1
## 3006 6 26 1 1
## 3007 7 26 1 1
## 3008 8 26 1 1
## 3009 9 26 1 1
## 3010 10 26 1 1
## 3011 11 26 1 1
## 3012 12 26 1 1
## 3013 13 26 1 1
## 3014 14 26 1 1
## 3015 15 26 1 1
## 3016 16 26 1 1
## 3017 17 26 1 1
## 3018 18 26 1 1
## 3019 19 26 1 1
## 3020 20 26 1 1
## 3021 21 26 1 1
## 3022 22 26 1 1
## 3023 23 26 1 1
## 3024 24 26 1 1
## 3025 25 26 1 1
## 3026 26 26 1 1
## 3027 27 26 1 1
## 3028 28 26 1 1
## 3029 29 26 1 1
## 3030 30 26 1 1
## 3031 31 26 1 1
## 3032 32 26 1 1
## 3033 33 26 1 1
## 3034 34 26 1 1
## 3035 35 26 1 1
## 3036 36 26 1 1
## 3037 37 26 1 1
## 3038 38 26 1 1
## 3039 39 26 1 1
## 3040 40 26 1 1
## 3041 41 26 1 1
## 3042 42 26 1 1
## 3043 43 26 1 1
## 3044 44 26 1 1
## 3045 45 26 1 1
## 3046 46 26 1 1
## 3047 47 26 1 1
## 3048 48 26 1 1
## 3049 49 26 1 1
## 3050 50 26 1 1
## 3051 51 26 1 1
## 3052 52 26 1 1
## 3053 53 26 1 1
## 3054 54 26 1 1
## 3055 55 26 1 1
## 3056 56 26 1 1
## 3057 57 26 1 1
## 3058 58 26 1 1
## 3059 59 26 1 1
## 3060 60 26 1 1
## 3061 61 26 1 1
## 3062 62 26 1 1
## 3063 63 26 1 1
## 3064 64 26 1 1
## 3065 65 26 1 1
## 3066 66 26 1 1
## 3067 67 26 1 1
## 3068 68 26 1 1
## 3069 69 26 1 1
## 3070 70 26 1 1
## 3071 71 26 1 1
## 3072 72 26 1 1
## 3073 73 26 1 1
## 3074 74 26 1 1
## 3075 75 26 1 1
## 3076 76 26 1 1
## 3077 77 26 1 1
## 3078 78 26 1 1
## 3079 79 26 1 1
## 3080 80 26 1 1
## 3081 81 26 1 1
## 3082 82 26 1 1
## 3083 83 26 1 1
## 3084 84 26 1 1
## 3085 85 26 1 1
## 3086 86 26 1 1
## 3087 87 26 1 1
## 3088 88 26 1 1
## 3089 89 26 1 1
## 3090 90 26 1 1
## 3091 91 26 1 1
## 3092 92 26 1 1
## 3093 93 26 1 1
## 3094 94 26 1 1
## 3095 95 26 1 1
## 3096 96 26 1 1
## 3097 97 26 1 1
## 3098 98 26 1 1
## 3099 99 26 1 1
## 3100 100 26 1 1
## 3101 101 26 1 1
## 3102 102 26 1 1
## 3103 103 26 1 1
## 3104 104 26 1 1
## 3105 105 26 1 1
## 3106 106 26 1 1
## 3107 107 26 1 1
## 3108 108 26 1 1
## 3109 109 26 1 1
## 3110 110 26 1 1
## 3111 111 26 1 1
## 3112 112 26 1 1
## 3113 113 26 1 1
## 3114 114 26 1 1
## 3115 115 26 1 1
## 3116 116 26 1 1
## 3117 117 26 1 1
## 3118 118 26 1 1
## 3119 119 26 1 1
## 3120 120 26 1 1
## 3121 1 27 1 1
## 3122 2 27 1 1
## 3123 3 27 1 1
## 3124 4 27 1 1
## 3125 5 27 1 1
## 3126 6 27 1 1
## 3127 7 27 1 1
## 3128 8 27 1 1
## 3129 9 27 1 1
## 3130 10 27 1 1
## 3131 11 27 1 1
## 3132 12 27 1 1
## 3133 13 27 1 1
## 3134 14 27 1 1
## 3135 15 27 1 1
## 3136 16 27 1 1
## 3137 17 27 1 1
## 3138 18 27 1 1
## 3139 19 27 1 1
## 3140 20 27 1 1
## 3141 21 27 1 1
## 3142 22 27 1 1
## 3143 23 27 1 1
## 3144 24 27 1 1
## 3145 25 27 1 1
## 3146 26 27 1 1
## 3147 27 27 1 1
## 3148 28 27 1 1
## 3149 29 27 1 1
## 3150 30 27 1 1
## 3151 31 27 1 1
## 3152 32 27 1 1
## 3153 33 27 1 1
## 3154 34 27 1 1
## 3155 35 27 1 1
## 3156 36 27 1 1
## 3157 37 27 1 1
## 3158 38 27 1 1
## 3159 39 27 1 1
## 3160 40 27 1 1
## 3161 41 27 1 1
## 3162 42 27 1 1
## 3163 43 27 1 1
## 3164 44 27 1 1
## 3165 45 27 1 1
## 3166 46 27 1 1
## 3167 47 27 1 1
## 3168 48 27 1 1
## 3169 49 27 1 1
## 3170 50 27 1 1
## 3171 51 27 1 1
## 3172 52 27 1 1
## 3173 53 27 1 1
## 3174 54 27 1 1
## 3175 55 27 1 1
## 3176 56 27 1 1
## 3177 57 27 1 1
## 3178 58 27 1 1
## 3179 59 27 1 1
## 3180 60 27 1 1
## 3181 61 27 1 1
## 3182 62 27 1 1
## 3183 63 27 1 1
## 3184 64 27 1 1
## 3185 65 27 1 1
## 3186 66 27 1 1
## 3187 67 27 1 1
## 3188 68 27 1 1
## 3189 69 27 1 1
## 3190 70 27 1 1
## 3191 71 27 1 1
## 3192 72 27 1 1
## 3193 73 27 1 1
## 3194 74 27 1 1
## 3195 75 27 1 1
## 3196 76 27 1 1
## 3197 77 27 1 1
## 3198 78 27 1 1
## 3199 79 27 1 1
## 3200 80 27 1 1
## 3201 81 27 1 1
## 3202 82 27 1 1
## 3203 83 27 1 1
## 3204 84 27 1 1
## 3205 85 27 1 1
## 3206 86 27 1 1
## 3207 87 27 1 1
## 3208 88 27 1 1
## 3209 89 27 1 1
## 3210 90 27 1 1
## 3211 91 27 1 1
## 3212 92 27 1 1
## 3213 93 27 1 1
## 3214 94 27 1 1
## 3215 95 27 1 1
## 3216 96 27 1 1
## 3217 97 27 1 1
## 3218 98 27 1 1
## 3219 99 27 1 1
## 3220 100 27 1 1
## 3221 101 27 1 1
## 3222 102 27 1 1
## 3223 103 27 1 1
## 3224 104 27 1 1
## 3225 105 27 1 1
## 3226 106 27 1 1
## 3227 107 27 1 1
## 3228 108 27 1 1
## 3229 109 27 1 1
## 3230 110 27 1 1
## 3231 111 27 1 1
## 3232 112 27 1 1
## 3233 113 27 1 1
## 3234 114 27 1 1
## 3235 115 27 1 1
## 3236 116 27 1 1
## 3237 117 27 1 1
## 3238 118 27 1 1
## 3239 119 27 1 1
## 3240 120 27 1 1
## 3241 1 28 1 1
## 3242 2 28 1 1
## 3243 3 28 1 1
## 3244 4 28 1 1
## 3245 5 28 1 1
## 3246 6 28 1 1
## 3247 7 28 1 1
## 3248 8 28 1 1
## 3249 9 28 1 1
## 3250 10 28 1 1
## 3251 11 28 1 1
## 3252 12 28 1 1
## 3253 13 28 1 1
## 3254 14 28 1 1
## 3255 15 28 1 1
## 3256 16 28 1 1
## 3257 17 28 1 1
## 3258 18 28 1 1
## 3259 19 28 1 1
## 3260 20 28 1 1
## 3261 21 28 1 1
## 3262 22 28 1 1
## 3263 23 28 1 1
## 3264 24 28 1 1
## 3265 25 28 1 1
## 3266 26 28 1 1
## 3267 27 28 1 1
## 3268 28 28 1 1
## 3269 29 28 1 1
## 3270 30 28 1 1
## 3271 31 28 1 1
## 3272 32 28 1 1
## 3273 33 28 1 1
## 3274 34 28 1 1
## 3275 35 28 1 1
## 3276 36 28 1 1
## 3277 37 28 1 1
## 3278 38 28 1 1
## 3279 39 28 1 1
## 3280 40 28 1 1
## 3281 41 28 1 1
## 3282 42 28 1 1
## 3283 43 28 1 1
## 3284 44 28 1 1
## 3285 45 28 1 1
## 3286 46 28 1 1
## 3287 47 28 1 1
## 3288 48 28 1 1
## 3289 49 28 1 1
## 3290 50 28 1 1
## 3291 51 28 1 1
## 3292 52 28 1 1
## 3293 53 28 1 1
## 3294 54 28 1 1
## 3295 55 28 1 1
## 3296 56 28 1 1
## 3297 57 28 1 1
## 3298 58 28 1 1
## 3299 59 28 1 1
## 3300 60 28 1 1
## 3301 61 28 1 1
## 3302 62 28 1 1
## 3303 63 28 1 1
## 3304 64 28 1 1
## 3305 65 28 1 1
## 3306 66 28 1 1
## 3307 67 28 1 1
## 3308 68 28 1 1
## 3309 69 28 1 1
## 3310 70 28 1 1
## 3311 71 28 1 1
## 3312 72 28 1 1
## 3313 73 28 1 1
## 3314 74 28 1 1
## 3315 75 28 1 1
## 3316 76 28 1 1
## 3317 77 28 1 1
## 3318 78 28 1 1
## 3319 79 28 1 1
## 3320 80 28 1 1
## 3321 81 28 1 1
## 3322 82 28 1 1
## 3323 83 28 1 1
## 3324 84 28 1 1
## 3325 85 28 1 1
## 3326 86 28 1 1
## 3327 87 28 1 1
## 3328 88 28 1 1
## 3329 89 28 1 1
## 3330 90 28 1 1
## 3331 91 28 1 1
## 3332 92 28 1 1
## 3333 93 28 1 1
## 3334 94 28 1 1
## 3335 95 28 1 1
## 3336 96 28 1 1
## 3337 97 28 1 1
## 3338 98 28 1 1
## 3339 99 28 1 1
## 3340 100 28 1 1
## 3341 101 28 1 1
## 3342 102 28 1 1
## 3343 103 28 1 1
## 3344 104 28 1 1
## 3345 105 28 1 1
## 3346 106 28 1 1
## 3347 107 28 1 1
## 3348 108 28 1 1
## 3349 109 28 1 1
## 3350 110 28 1 1
## 3351 111 28 1 1
## 3352 112 28 1 1
## 3353 113 28 1 1
## 3354 114 28 1 1
## 3355 115 28 1 1
## 3356 116 28 1 1
## 3357 117 28 1 1
## 3358 118 28 1 1
## 3359 119 28 1 1
## 3360 120 28 1 1
## 3361 1 29 1 1
## 3362 2 29 1 1
## 3363 3 29 1 1
## 3364 4 29 1 1
## 3365 5 29 1 1
## 3366 6 29 1 1
## 3367 7 29 1 1
## 3368 8 29 1 1
## 3369 9 29 1 1
## 3370 10 29 1 1
## 3371 11 29 1 1
## 3372 12 29 1 1
## 3373 13 29 1 1
## 3374 14 29 1 1
## 3375 15 29 1 1
## 3376 16 29 1 1
## 3377 17 29 1 1
## 3378 18 29 1 1
## 3379 19 29 1 1
## 3380 20 29 1 1
## 3381 21 29 1 1
## 3382 22 29 1 1
## 3383 23 29 1 1
## 3384 24 29 1 1
## 3385 25 29 1 1
## 3386 26 29 1 1
## 3387 27 29 1 1
## 3388 28 29 1 1
## 3389 29 29 1 1
## 3390 30 29 1 1
## 3391 31 29 1 1
## 3392 32 29 1 1
## 3393 33 29 1 1
## 3394 34 29 1 1
## 3395 35 29 1 1
## 3396 36 29 1 1
## 3397 37 29 1 1
## 3398 38 29 1 1
## 3399 39 29 1 1
## 3400 40 29 1 1
## 3401 41 29 1 1
## 3402 42 29 1 1
## 3403 43 29 1 1
## 3404 44 29 1 1
## 3405 45 29 1 1
## 3406 46 29 1 1
## 3407 47 29 1 1
## 3408 48 29 1 1
## 3409 49 29 1 1
## 3410 50 29 1 1
## 3411 51 29 1 1
## 3412 52 29 1 1
## 3413 53 29 1 1
## 3414 54 29 1 1
## 3415 55 29 1 1
## 3416 56 29 1 1
## 3417 57 29 1 1
## 3418 58 29 1 1
## 3419 59 29 1 1
## 3420 60 29 1 1
## 3421 61 29 1 1
## 3422 62 29 1 1
## 3423 63 29 1 1
## 3424 64 29 1 1
## 3425 65 29 1 1
## 3426 66 29 1 1
## 3427 67 29 1 1
## 3428 68 29 1 1
## 3429 69 29 1 1
## 3430 70 29 1 1
## 3431 71 29 1 1
## 3432 72 29 1 1
## 3433 73 29 1 1
## 3434 74 29 1 1
## 3435 75 29 1 1
## 3436 76 29 1 1
## 3437 77 29 1 1
## 3438 78 29 1 1
## 3439 79 29 1 1
## 3440 80 29 1 1
## 3441 81 29 1 1
## 3442 82 29 1 1
## 3443 83 29 1 1
## 3444 84 29 1 1
## 3445 85 29 1 1
## 3446 86 29 1 1
## 3447 87 29 1 1
## 3448 88 29 1 1
## 3449 89 29 1 1
## 3450 90 29 1 1
## 3451 91 29 1 1
## 3452 92 29 1 1
## 3453 93 29 1 1
## 3454 94 29 1 1
## 3455 95 29 1 1
## 3456 96 29 1 1
## 3457 97 29 1 1
## 3458 98 29 1 1
## 3459 99 29 1 1
## 3460 100 29 1 1
## 3461 101 29 1 1
## 3462 102 29 1 1
## 3463 103 29 1 1
## 3464 104 29 1 1
## 3465 105 29 1 1
## 3466 106 29 1 1
## 3467 107 29 1 1
## 3468 108 29 1 1
## 3469 109 29 1 1
## 3470 110 29 1 1
## 3471 111 29 1 1
## 3472 112 29 1 1
## 3473 113 29 1 1
## 3474 114 29 1 1
## 3475 115 29 1 1
## 3476 116 29 1 1
## 3477 117 29 1 1
## 3478 118 29 1 1
## 3479 119 29 1 1
## 3480 120 29 1 1
## 3481 1 30 1 1
## 3482 2 30 1 1
## 3483 3 30 1 1
## 3484 4 30 1 1
## 3485 5 30 1 1
## 3486 6 30 1 1
## 3487 7 30 1 1
## 3488 8 30 1 1
## 3489 9 30 1 1
## 3490 10 30 1 1
## 3491 11 30 1 1
## 3492 12 30 1 1
## 3493 13 30 1 1
## 3494 14 30 1 1
## 3495 15 30 1 1
## 3496 16 30 1 1
## 3497 17 30 1 1
## 3498 18 30 1 1
## 3499 19 30 1 1
## 3500 20 30 1 1
## 3501 21 30 1 1
## 3502 22 30 1 1
## 3503 23 30 1 1
## 3504 24 30 1 1
## 3505 25 30 1 1
## 3506 26 30 1 1
## 3507 27 30 1 1
## 3508 28 30 1 1
## 3509 29 30 1 1
## 3510 30 30 1 1
## 3511 31 30 1 1
## 3512 32 30 1 1
## 3513 33 30 1 1
## 3514 34 30 1 1
## 3515 35 30 1 1
## 3516 36 30 1 1
## 3517 37 30 1 1
## 3518 38 30 1 1
## 3519 39 30 1 1
## 3520 40 30 1 1
## 3521 41 30 1 1
## 3522 42 30 1 1
## 3523 43 30 1 1
## 3524 44 30 1 1
## 3525 45 30 1 1
## 3526 46 30 1 1
## 3527 47 30 1 1
## 3528 48 30 1 1
## 3529 49 30 1 1
## 3530 50 30 1 1
## 3531 51 30 1 1
## 3532 52 30 1 1
## 3533 53 30 1 1
## 3534 54 30 1 1
## 3535 55 30 1 1
## 3536 56 30 1 1
## 3537 57 30 1 1
## 3538 58 30 1 1
## 3539 59 30 1 1
## 3540 60 30 1 1
## 3541 61 30 1 1
## 3542 62 30 1 1
## 3543 63 30 1 1
## 3544 64 30 1 1
## 3545 65 30 1 1
## 3546 66 30 1 1
## 3547 67 30 1 1
## 3548 68 30 1 1
## 3549 69 30 1 1
## 3550 70 30 1 1
## 3551 71 30 1 1
## 3552 72 30 1 1
## 3553 73 30 1 1
## 3554 74 30 1 1
## 3555 75 30 1 1
## 3556 76 30 1 1
## 3557 77 30 1 1
## 3558 78 30 1 1
## 3559 79 30 1 1
## 3560 80 30 1 1
## 3561 81 30 1 1
## 3562 82 30 1 1
## 3563 83 30 1 1
## 3564 84 30 1 1
## 3565 85 30 1 1
## 3566 86 30 1 1
## 3567 87 30 1 1
## 3568 88 30 1 1
## 3569 89 30 1 1
## 3570 90 30 1 1
## 3571 91 30 1 1
## 3572 92 30 1 1
## 3573 93 30 1 1
## 3574 94 30 1 1
## 3575 95 30 1 1
## 3576 96 30 1 1
## 3577 97 30 1 1
## 3578 98 30 1 1
## 3579 99 30 1 1
## 3580 100 30 1 1
## 3581 101 30 1 1
## 3582 102 30 1 1
## 3583 103 30 1 1
## 3584 104 30 1 1
## 3585 105 30 1 1
## 3586 106 30 1 1
## 3587 107 30 1 1
## 3588 108 30 1 1
## 3589 109 30 1 1
## 3590 110 30 1 1
## 3591 111 30 1 1
## 3592 112 30 1 1
## 3593 113 30 1 1
## 3594 114 30 1 1
## 3595 115 30 1 1
## 3596 116 30 1 1
## 3597 117 30 1 1
## 3598 118 30 1 1
## 3599 119 30 1 1
## 3600 120 30 1 1
## 3601 1 31 1 1
## 3602 2 31 1 1
## 3603 3 31 1 1
## 3604 4 31 1 1
## 3605 5 31 1 1
## 3606 6 31 1 1
## 3607 7 31 1 1
## 3608 8 31 1 1
## 3609 9 31 1 1
## 3610 10 31 1 1
## 3611 11 31 1 1
## 3612 12 31 1 1
## 3613 13 31 1 1
## 3614 14 31 1 1
## 3615 15 31 1 1
## 3616 16 31 1 1
## 3617 17 31 1 1
## 3618 18 31 1 1
## 3619 19 31 1 1
## 3620 20 31 1 1
## 3621 21 31 1 1
## 3622 22 31 1 1
## 3623 23 31 1 1
## 3624 24 31 1 1
## 3625 25 31 1 1
## 3626 26 31 1 1
## 3627 27 31 1 1
## 3628 28 31 1 1
## 3629 29 31 1 1
## 3630 30 31 1 1
## 3631 31 31 1 1
## 3632 32 31 1 1
## 3633 33 31 1 1
## 3634 34 31 1 1
## 3635 35 31 1 1
## 3636 36 31 1 1
## 3637 37 31 1 1
## 3638 38 31 1 1
## 3639 39 31 1 1
## 3640 40 31 1 1
## 3641 41 31 1 1
## 3642 42 31 1 1
## 3643 43 31 1 1
## 3644 44 31 1 1
## 3645 45 31 1 1
## 3646 46 31 1 1
## 3647 47 31 1 1
## 3648 48 31 1 1
## 3649 49 31 1 1
## 3650 50 31 1 1
## 3651 51 31 1 1
## 3652 52 31 1 1
## 3653 53 31 1 1
## 3654 54 31 1 1
## 3655 55 31 1 1
## 3656 56 31 1 1
## 3657 57 31 1 1
## 3658 58 31 1 1
## 3659 59 31 1 1
## 3660 60 31 1 1
## 3661 61 31 1 1
## 3662 62 31 1 1
## 3663 63 31 1 1
## 3664 64 31 1 1
## 3665 65 31 1 1
## 3666 66 31 1 1
## 3667 67 31 1 1
## 3668 68 31 1 1
## 3669 69 31 1 1
## 3670 70 31 1 1
## 3671 71 31 1 1
## 3672 72 31 1 1
## 3673 73 31 1 1
## 3674 74 31 1 1
## 3675 75 31 1 1
## 3676 76 31 1 1
## 3677 77 31 1 1
## 3678 78 31 1 1
## 3679 79 31 1 1
## 3680 80 31 1 1
## 3681 81 31 1 1
## 3682 82 31 1 1
## 3683 83 31 1 1
## 3684 84 31 1 1
## 3685 85 31 1 1
## 3686 86 31 1 1
## 3687 87 31 1 1
## 3688 88 31 1 1
## 3689 89 31 1 1
## 3690 90 31 1 1
## 3691 91 31 1 1
## 3692 92 31 1 1
## 3693 93 31 1 1
## 3694 94 31 1 1
## 3695 95 31 1 1
## 3696 96 31 1 1
## 3697 97 31 1 1
## 3698 98 31 1 1
## 3699 99 31 1 1
## 3700 100 31 1 1
## 3701 101 31 1 1
## 3702 102 31 1 1
## 3703 103 31 1 1
## 3704 104 31 1 1
## 3705 105 31 1 1
## 3706 106 31 1 1
## 3707 107 31 1 1
## 3708 108 31 1 1
## 3709 109 31 1 1
## 3710 110 31 1 1
## 3711 111 31 1 1
## 3712 112 31 1 1
## 3713 113 31 1 1
## 3714 114 31 1 1
## 3715 115 31 1 1
## 3716 116 31 1 1
## 3717 117 31 1 1
## 3718 118 31 1 1
## 3719 119 31 1 1
## 3720 120 31 1 1
## 3721 1 32 1 1
## 3722 2 32 1 1
## 3723 3 32 1 1
## 3724 4 32 1 1
## 3725 5 32 1 1
## 3726 6 32 1 1
## 3727 7 32 1 1
## 3728 8 32 1 1
## 3729 9 32 1 1
## 3730 10 32 1 1
## 3731 11 32 1 1
## 3732 12 32 1 1
## 3733 13 32 1 1
## 3734 14 32 1 1
## 3735 15 32 1 1
## 3736 16 32 1 1
## 3737 17 32 1 1
## 3738 18 32 1 1
## 3739 19 32 1 1
## 3740 20 32 1 1
## 3741 21 32 1 1
## 3742 22 32 1 1
## 3743 23 32 1 1
## 3744 24 32 1 1
## 3745 25 32 1 1
## 3746 26 32 1 1
## 3747 27 32 1 1
## 3748 28 32 1 1
## 3749 29 32 1 1
## 3750 30 32 1 1
## 3751 31 32 1 1
## 3752 32 32 1 1
## 3753 33 32 1 1
## 3754 34 32 1 1
## 3755 35 32 1 1
## 3756 36 32 1 1
## 3757 37 32 1 1
## 3758 38 32 1 1
## 3759 39 32 1 1
## 3760 40 32 1 1
## 3761 41 32 1 1
## 3762 42 32 1 1
## 3763 43 32 1 1
## 3764 44 32 1 1
## 3765 45 32 1 1
## 3766 46 32 1 1
## 3767 47 32 1 1
## 3768 48 32 1 1
## 3769 49 32 1 1
## 3770 50 32 1 1
## 3771 51 32 1 1
## 3772 52 32 1 1
## 3773 53 32 1 1
## 3774 54 32 1 1
## 3775 55 32 1 1
## 3776 56 32 1 1
## 3777 57 32 1 1
## 3778 58 32 1 1
## 3779 59 32 1 1
## 3780 60 32 1 1
## 3781 61 32 1 1
## 3782 62 32 1 1
## 3783 63 32 1 1
## 3784 64 32 1 1
## 3785 65 32 1 1
## 3786 66 32 1 1
## 3787 67 32 1 1
## 3788 68 32 1 1
## 3789 69 32 1 1
## 3790 70 32 1 1
## 3791 71 32 1 1
## 3792 72 32 1 1
## 3793 73 32 1 1
## 3794 74 32 1 1
## 3795 75 32 1 1
## 3796 76 32 1 1
## 3797 77 32 1 1
## 3798 78 32 1 1
## 3799 79 32 1 1
## 3800 80 32 1 1
## 3801 81 32 1 1
## 3802 82 32 1 1
## 3803 83 32 1 1
## 3804 84 32 1 1
## 3805 85 32 1 1
## 3806 86 32 1 1
## 3807 87 32 1 1
## 3808 88 32 1 1
## 3809 89 32 1 1
## 3810 90 32 1 1
## 3811 91 32 1 1
## 3812 92 32 1 1
## 3813 93 32 1 1
## 3814 94 32 1 1
## 3815 95 32 1 1
## 3816 96 32 1 1
## 3817 97 32 1 1
## 3818 98 32 1 1
## 3819 99 32 1 1
## 3820 100 32 1 1
## 3821 101 32 1 1
## 3822 102 32 1 1
## 3823 103 32 1 1
## 3824 104 32 1 1
## 3825 105 32 1 1
## 3826 106 32 1 1
## 3827 107 32 1 1
## 3828 108 32 1 1
## 3829 109 32 1 1
## 3830 110 32 1 1
## 3831 111 32 1 1
## 3832 112 32 1 1
## 3833 113 32 1 1
## 3834 114 32 1 1
## 3835 115 32 1 1
## 3836 116 32 1 1
## 3837 117 32 1 1
## 3838 118 32 1 1
## 3839 119 32 1 1
## 3840 120 32 1 1
## 3841 1 33 1 1
## 3842 2 33 1 1
## 3843 3 33 1 1
## 3844 4 33 1 1
## 3845 5 33 1 1
## 3846 6 33 1 1
## 3847 7 33 1 1
## 3848 8 33 1 1
## 3849 9 33 1 1
## 3850 10 33 1 1
## 3851 11 33 1 1
## 3852 12 33 1 1
## 3853 13 33 1 1
## 3854 14 33 1 1
## 3855 15 33 1 1
## 3856 16 33 1 1
## 3857 17 33 1 1
## 3858 18 33 1 1
## 3859 19 33 1 1
## 3860 20 33 1 1
## 3861 21 33 1 1
## 3862 22 33 1 1
## 3863 23 33 1 1
## 3864 24 33 1 1
## 3865 25 33 1 1
## 3866 26 33 1 1
## 3867 27 33 1 1
## 3868 28 33 1 1
## 3869 29 33 1 1
## 3870 30 33 1 1
## 3871 31 33 1 1
## 3872 32 33 1 1
## 3873 33 33 1 1
## 3874 34 33 1 1
## 3875 35 33 1 1
## 3876 36 33 1 1
## 3877 37 33 1 1
## 3878 38 33 1 1
## 3879 39 33 1 1
## 3880 40 33 1 1
## 3881 41 33 1 1
## 3882 42 33 1 1
## 3883 43 33 1 1
## 3884 44 33 1 1
## 3885 45 33 1 1
## 3886 46 33 1 1
## 3887 47 33 1 1
## 3888 48 33 1 1
## 3889 49 33 1 1
## 3890 50 33 1 1
## 3891 51 33 1 1
## 3892 52 33 1 1
## 3893 53 33 1 1
## 3894 54 33 1 1
## 3895 55 33 1 1
## 3896 56 33 1 1
## 3897 57 33 1 1
## 3898 58 33 1 1
## 3899 59 33 1 1
## 3900 60 33 1 1
## 3901 61 33 1 1
## 3902 62 33 1 1
## 3903 63 33 1 1
## 3904 64 33 1 1
## 3905 65 33 1 1
## 3906 66 33 1 1
## 3907 67 33 1 1
## 3908 68 33 1 1
## 3909 69 33 1 1
## 3910 70 33 1 1
## 3911 71 33 1 1
## 3912 72 33 1 1
## 3913 73 33 1 1
## 3914 74 33 1 1
## 3915 75 33 1 1
## 3916 76 33 1 1
## 3917 77 33 1 1
## 3918 78 33 1 1
## 3919 79 33 1 1
## 3920 80 33 1 1
## 3921 81 33 1 1
## 3922 82 33 1 1
## 3923 83 33 1 1
## 3924 84 33 1 1
## 3925 85 33 1 1
## 3926 86 33 1 1
## 3927 87 33 1 1
## 3928 88 33 1 1
## 3929 89 33 1 1
## 3930 90 33 1 1
## 3931 91 33 1 1
## 3932 92 33 1 1
## 3933 93 33 1 1
## 3934 94 33 1 1
## 3935 95 33 1 1
## 3936 96 33 1 1
## 3937 97 33 1 1
## 3938 98 33 1 1
## 3939 99 33 1 1
## 3940 100 33 1 1
## 3941 101 33 1 1
## 3942 102 33 1 1
## 3943 103 33 1 1
## 3944 104 33 1 1
## 3945 105 33 1 1
## 3946 106 33 1 1
## 3947 107 33 1 1
## 3948 108 33 1 1
## 3949 109 33 1 1
## 3950 110 33 1 1
## 3951 111 33 1 1
## 3952 112 33 1 1
## 3953 113 33 1 1
## 3954 114 33 1 1
## 3955 115 33 1 1
## 3956 116 33 1 1
## 3957 117 33 1 1
## 3958 118 33 1 1
## 3959 119 33 1 1
## 3960 120 33 1 1
## 3961 1 34 1 1
## 3962 2 34 1 1
## 3963 3 34 1 1
## 3964 4 34 1 1
## 3965 5 34 1 1
## 3966 6 34 1 1
## 3967 7 34 1 1
## 3968 8 34 1 1
## 3969 9 34 1 1
## 3970 10 34 1 1
## 3971 11 34 1 1
## 3972 12 34 1 1
## 3973 13 34 1 1
## 3974 14 34 1 1
## 3975 15 34 1 1
## 3976 16 34 1 1
## 3977 17 34 1 1
## 3978 18 34 1 1
## 3979 19 34 1 1
## 3980 20 34 1 1
## 3981 21 34 1 1
## 3982 22 34 1 1
## 3983 23 34 1 1
## 3984 24 34 1 1
## 3985 25 34 1 1
## 3986 26 34 1 1
## 3987 27 34 1 1
## 3988 28 34 1 1
## 3989 29 34 1 1
## 3990 30 34 1 1
## 3991 31 34 1 1
## 3992 32 34 1 1
## 3993 33 34 1 1
## 3994 34 34 1 1
## 3995 35 34 1 1
## 3996 36 34 1 1
## 3997 37 34 1 1
## 3998 38 34 1 1
## 3999 39 34 1 1
## 4000 40 34 1 1
## 4001 41 34 1 1
## 4002 42 34 1 1
## 4003 43 34 1 1
## 4004 44 34 1 1
## 4005 45 34 1 1
## 4006 46 34 1 1
## 4007 47 34 1 1
## 4008 48 34 1 1
## 4009 49 34 1 1
## 4010 50 34 1 1
## 4011 51 34 1 1
## 4012 52 34 1 1
## 4013 53 34 1 1
## 4014 54 34 1 1
## 4015 55 34 1 1
## 4016 56 34 1 1
## 4017 57 34 1 1
## 4018 58 34 1 1
## 4019 59 34 1 1
## 4020 60 34 1 1
## 4021 61 34 1 1
## 4022 62 34 1 1
## 4023 63 34 1 1
## 4024 64 34 1 1
## 4025 65 34 1 1
## 4026 66 34 1 1
## 4027 67 34 1 1
## 4028 68 34 1 1
## 4029 69 34 1 1
## 4030 70 34 1 1
## 4031 71 34 1 1
## 4032 72 34 1 1
## 4033 73 34 1 1
## 4034 74 34 1 1
## 4035 75 34 1 1
## 4036 76 34 1 1
## 4037 77 34 1 1
## 4038 78 34 1 1
## 4039 79 34 1 1
## 4040 80 34 1 1
## 4041 81 34 1 1
## 4042 82 34 1 1
## 4043 83 34 1 1
## 4044 84 34 1 1
## 4045 85 34 1 1
## 4046 86 34 1 1
## 4047 87 34 1 1
## 4048 88 34 1 1
## 4049 89 34 1 1
## 4050 90 34 1 1
## 4051 91 34 1 1
## 4052 92 34 1 1
## 4053 93 34 1 1
## 4054 94 34 1 1
## 4055 95 34 1 1
## 4056 96 34 1 1
## 4057 97 34 1 1
## 4058 98 34 1 1
## 4059 99 34 1 1
## 4060 100 34 1 1
## 4061 101 34 1 1
## 4062 102 34 1 1
## 4063 103 34 1 1
## 4064 104 34 1 1
## 4065 105 34 1 1
## 4066 106 34 1 1
## 4067 107 34 1 1
## 4068 108 34 1 1
## 4069 109 34 1 1
## 4070 110 34 1 1
## 4071 111 34 1 1
## 4072 112 34 1 1
## 4073 113 34 1 1
## 4074 114 34 1 1
## 4075 115 34 1 1
## 4076 116 34 1 1
## 4077 117 34 1 1
## 4078 118 34 1 1
## 4079 119 34 1 1
## 4080 120 34 1 1
## 4081 1 35 1 1
## 4082 2 35 1 1
## 4083 3 35 1 1
## 4084 4 35 1 1
## 4085 5 35 1 1
## 4086 6 35 1 1
## 4087 7 35 1 1
## 4088 8 35 1 1
## 4089 9 35 1 1
## 4090 10 35 1 1
## 4091 11 35 1 1
## 4092 12 35 1 1
## 4093 13 35 1 1
## 4094 14 35 1 1
## 4095 15 35 1 1
## 4096 16 35 1 1
## 4097 17 35 1 1
## 4098 18 35 1 1
## 4099 19 35 1 1
## 4100 20 35 1 1
## 4101 21 35 1 1
## 4102 22 35 1 1
## 4103 23 35 1 1
## 4104 24 35 1 1
## 4105 25 35 1 1
## 4106 26 35 1 1
## 4107 27 35 1 1
## 4108 28 35 1 1
## 4109 29 35 1 1
## 4110 30 35 1 1
## 4111 31 35 1 1
## 4112 32 35 1 1
## 4113 33 35 1 1
## 4114 34 35 1 1
## 4115 35 35 1 1
## 4116 36 35 1 1
## 4117 37 35 1 1
## 4118 38 35 1 1
## 4119 39 35 1 1
## 4120 40 35 1 1
## 4121 41 35 1 1
## 4122 42 35 1 1
## 4123 43 35 1 1
## 4124 44 35 1 1
## 4125 45 35 1 1
## 4126 46 35 1 1
## 4127 47 35 1 1
## 4128 48 35 1 1
## 4129 49 35 1 1
## 4130 50 35 1 1
## 4131 51 35 1 1
## 4132 52 35 1 1
## 4133 53 35 1 1
## 4134 54 35 1 1
## 4135 55 35 1 1
## 4136 56 35 1 1
## 4137 57 35 1 1
## 4138 58 35 1 1
## 4139 59 35 1 1
## 4140 60 35 1 1
## 4141 61 35 1 1
## 4142 62 35 1 1
## 4143 63 35 1 1
## 4144 64 35 1 1
## 4145 65 35 1 1
## 4146 66 35 1 1
## 4147 67 35 1 1
## 4148 68 35 1 1
## 4149 69 35 1 1
## 4150 70 35 1 1
## 4151 71 35 1 1
## 4152 72 35 1 1
## 4153 73 35 1 1
## 4154 74 35 1 1
## 4155 75 35 1 1
## 4156 76 35 1 1
## 4157 77 35 1 1
## 4158 78 35 1 1
## 4159 79 35 1 1
## 4160 80 35 1 1
## 4161 81 35 1 1
## 4162 82 35 1 1
## 4163 83 35 1 1
## 4164 84 35 1 1
## 4165 85 35 1 1
## 4166 86 35 1 1
## 4167 87 35 1 1
## 4168 88 35 1 1
## 4169 89 35 1 1
## 4170 90 35 1 1
## 4171 91 35 1 1
## 4172 92 35 1 1
## 4173 93 35 1 1
## 4174 94 35 1 1
## 4175 95 35 1 1
## 4176 96 35 1 1
## 4177 97 35 1 1
## 4178 98 35 1 1
## 4179 99 35 1 1
## 4180 100 35 1 1
## 4181 101 35 1 1
## 4182 102 35 1 1
## 4183 103 35 1 1
## 4184 104 35 1 1
## 4185 105 35 1 1
## 4186 106 35 1 1
## 4187 107 35 1 1
## 4188 108 35 1 1
## 4189 109 35 1 1
## 4190 110 35 1 1
## 4191 111 35 1 1
## 4192 112 35 1 1
## 4193 113 35 1 1
## 4194 114 35 1 1
## 4195 115 35 1 1
## 4196 116 35 1 1
## 4197 117 35 1 1
## 4198 118 35 1 1
## 4199 119 35 1 1
## 4200 120 35 1 1
## 4201 1 36 1 1
## 4202 2 36 1 1
## 4203 3 36 1 1
## 4204 4 36 1 1
## 4205 5 36 1 1
## 4206 6 36 1 1
## 4207 7 36 1 1
## 4208 8 36 1 1
## 4209 9 36 1 1
## 4210 10 36 1 1
## 4211 11 36 1 1
## 4212 12 36 1 1
## 4213 13 36 1 1
## 4214 14 36 1 1
## 4215 15 36 1 1
## 4216 16 36 1 1
## 4217 17 36 1 1
## 4218 18 36 1 1
## 4219 19 36 1 1
## 4220 20 36 1 1
## 4221 21 36 1 1
## 4222 22 36 1 1
## 4223 23 36 1 1
## 4224 24 36 1 1
## 4225 25 36 1 1
## 4226 26 36 1 1
## 4227 27 36 1 1
## 4228 28 36 1 1
## 4229 29 36 1 1
## 4230 30 36 1 1
## 4231 31 36 1 1
## 4232 32 36 1 1
## 4233 33 36 1 1
## 4234 34 36 1 1
## 4235 35 36 1 1
## 4236 36 36 1 1
## 4237 37 36 1 1
## 4238 38 36 1 1
## 4239 39 36 1 1
## 4240 40 36 1 1
## 4241 41 36 1 1
## 4242 42 36 1 1
## 4243 43 36 1 1
## 4244 44 36 1 1
## 4245 45 36 1 1
## 4246 46 36 1 1
## 4247 47 36 1 1
## 4248 48 36 1 1
## 4249 49 36 1 1
## 4250 50 36 1 1
## 4251 51 36 1 1
## 4252 52 36 1 1
## 4253 53 36 1 1
## 4254 54 36 1 1
## 4255 55 36 1 1
## 4256 56 36 1 1
## 4257 57 36 1 1
## 4258 58 36 1 1
## 4259 59 36 1 1
## 4260 60 36 1 1
## 4261 61 36 1 1
## 4262 62 36 1 1
## 4263 63 36 1 1
## 4264 64 36 1 1
## 4265 65 36 1 1
## 4266 66 36 1 1
## 4267 67 36 1 1
## 4268 68 36 1 1
## 4269 69 36 1 1
## 4270 70 36 1 1
## 4271 71 36 1 1
## 4272 72 36 1 1
## 4273 73 36 1 1
## 4274 74 36 1 1
## 4275 75 36 1 1
## 4276 76 36 1 1
## 4277 77 36 1 1
## 4278 78 36 1 1
## 4279 79 36 1 1
## 4280 80 36 1 1
## 4281 81 36 1 1
## 4282 82 36 1 1
## 4283 83 36 1 1
## 4284 84 36 1 1
## 4285 85 36 1 1
## 4286 86 36 1 1
## 4287 87 36 1 1
## 4288 88 36 1 1
## 4289 89 36 1 1
## 4290 90 36 1 1
## 4291 91 36 1 1
## 4292 92 36 1 1
## 4293 93 36 1 1
## 4294 94 36 1 1
## 4295 95 36 1 1
## 4296 96 36 1 1
## 4297 97 36 1 1
## 4298 98 36 1 1
## 4299 99 36 1 1
## 4300 100 36 1 1
## 4301 101 36 1 1
## 4302 102 36 1 1
## 4303 103 36 1 1
## 4304 104 36 1 1
## 4305 105 36 1 1
## 4306 106 36 1 1
## 4307 107 36 1 1
## 4308 108 36 1 1
## 4309 109 36 1 1
## 4310 110 36 1 1
## 4311 111 36 1 1
## 4312 112 36 1 1
## 4313 113 36 1 1
## 4314 114 36 1 1
## 4315 115 36 1 1
## 4316 116 36 1 1
## 4317 117 36 1 1
## 4318 118 36 1 1
## 4319 119 36 1 1
## 4320 120 36 1 1
## 4321 1 37 1 1
## 4322 2 37 1 1
## 4323 3 37 1 1
## 4324 4 37 1 1
## 4325 5 37 1 1
## 4326 6 37 1 1
## 4327 7 37 1 1
## 4328 8 37 1 1
## 4329 9 37 1 1
## 4330 10 37 1 1
## 4331 11 37 1 1
## 4332 12 37 1 1
## 4333 13 37 1 1
## 4334 14 37 1 1
## 4335 15 37 1 1
## 4336 16 37 1 1
## 4337 17 37 1 1
## 4338 18 37 1 1
## 4339 19 37 1 1
## 4340 20 37 1 1
## 4341 21 37 1 1
## 4342 22 37 1 1
## 4343 23 37 1 1
## 4344 24 37 1 1
## 4345 25 37 1 1
## 4346 26 37 1 1
## 4347 27 37 1 1
## 4348 28 37 1 1
## 4349 29 37 1 1
## 4350 30 37 1 1
## 4351 31 37 1 1
## 4352 32 37 1 1
## 4353 33 37 1 1
## 4354 34 37 1 1
## 4355 35 37 1 1
## 4356 36 37 1 1
## 4357 37 37 1 1
## 4358 38 37 1 1
## 4359 39 37 1 1
## 4360 40 37 1 1
## 4361 41 37 1 1
## 4362 42 37 1 1
## 4363 43 37 1 1
## 4364 44 37 1 1
## 4365 45 37 1 1
## 4366 46 37 1 1
## 4367 47 37 1 1
## 4368 48 37 1 1
## 4369 49 37 1 1
## 4370 50 37 1 1
## 4371 51 37 1 1
## 4372 52 37 1 1
## 4373 53 37 1 1
## 4374 54 37 1 1
## 4375 55 37 1 1
## 4376 56 37 1 1
## 4377 57 37 1 1
## 4378 58 37 1 1
## 4379 59 37 1 1
## 4380 60 37 1 1
## 4381 61 37 1 1
## 4382 62 37 1 1
## 4383 63 37 1 1
## 4384 64 37 1 1
## 4385 65 37 1 1
## 4386 66 37 1 1
## 4387 67 37 1 1
## 4388 68 37 1 1
## 4389 69 37 1 1
## 4390 70 37 1 1
## 4391 71 37 1 1
## 4392 72 37 1 1
## 4393 73 37 1 1
## 4394 74 37 1 1
## 4395 75 37 1 1
## 4396 76 37 1 1
## 4397 77 37 1 1
## 4398 78 37 1 1
## 4399 79 37 1 1
## 4400 80 37 1 1
## 4401 81 37 1 1
## 4402 82 37 1 1
## 4403 83 37 1 1
## 4404 84 37 1 1
## 4405 85 37 1 1
## 4406 86 37 1 1
## 4407 87 37 1 1
## 4408 88 37 1 1
## 4409 89 37 1 1
## 4410 90 37 1 1
## 4411 91 37 1 1
## 4412 92 37 1 1
## 4413 93 37 1 1
## 4414 94 37 1 1
## 4415 95 37 1 1
## 4416 96 37 1 1
## 4417 97 37 1 1
## 4418 98 37 1 1
## 4419 99 37 1 1
## 4420 100 37 1 1
## 4421 101 37 1 1
## 4422 102 37 1 1
## 4423 103 37 1 1
## 4424 104 37 1 1
## 4425 105 37 1 1
## 4426 106 37 1 1
## 4427 107 37 1 1
## 4428 108 37 1 1
## 4429 109 37 1 1
## 4430 110 37 1 1
## 4431 111 37 1 1
## 4432 112 37 1 1
## 4433 113 37 1 1
## 4434 114 37 1 1
## 4435 115 37 1 1
## 4436 116 37 1 1
## 4437 117 37 1 1
## 4438 118 37 1 1
## 4439 119 37 1 1
## 4440 120 37 1 1
## 4441 1 38 1 1
## 4442 2 38 1 1
## 4443 3 38 1 1
## 4444 4 38 1 1
## 4445 5 38 1 1
## 4446 6 38 1 1
## 4447 7 38 1 1
## 4448 8 38 1 1
## 4449 9 38 1 1
## 4450 10 38 1 1
## 4451 11 38 1 1
## 4452 12 38 1 1
## 4453 13 38 1 1
## 4454 14 38 1 1
## 4455 15 38 1 1
## 4456 16 38 1 1
## 4457 17 38 1 1
## 4458 18 38 1 1
## 4459 19 38 1 1
## 4460 20 38 1 1
## 4461 21 38 1 1
## 4462 22 38 1 1
## 4463 23 38 1 1
## 4464 24 38 1 1
## 4465 25 38 1 1
## 4466 26 38 1 1
## 4467 27 38 1 1
## 4468 28 38 1 1
## 4469 29 38 1 1
## 4470 30 38 1 1
## 4471 31 38 1 1
## 4472 32 38 1 1
## 4473 33 38 1 1
## 4474 34 38 1 1
## 4475 35 38 1 1
## 4476 36 38 1 1
## 4477 37 38 1 1
## 4478 38 38 1 1
## 4479 39 38 1 1
## 4480 40 38 1 1
## 4481 41 38 1 1
## 4482 42 38 1 1
## 4483 43 38 1 1
## 4484 44 38 1 1
## 4485 45 38 1 1
## 4486 46 38 1 1
## 4487 47 38 1 1
## 4488 48 38 1 1
## 4489 49 38 1 1
## 4490 50 38 1 1
## 4491 51 38 1 1
## 4492 52 38 1 1
## 4493 53 38 1 1
## 4494 54 38 1 1
## 4495 55 38 1 1
## 4496 56 38 1 1
## 4497 57 38 1 1
## 4498 58 38 1 1
## 4499 59 38 1 1
## 4500 60 38 1 1
## 4501 61 38 1 1
## 4502 62 38 1 1
## 4503 63 38 1 1
## 4504 64 38 1 1
## 4505 65 38 1 1
## 4506 66 38 1 1
## 4507 67 38 1 1
## 4508 68 38 1 1
## 4509 69 38 1 1
## 4510 70 38 1 1
## 4511 71 38 1 1
## 4512 72 38 1 1
## 4513 73 38 1 1
## 4514 74 38 1 1
## 4515 75 38 1 1
## 4516 76 38 1 1
## 4517 77 38 1 1
## 4518 78 38 1 1
## 4519 79 38 1 1
## 4520 80 38 1 1
## 4521 81 38 1 1
## 4522 82 38 1 1
## 4523 83 38 1 1
## 4524 84 38 1 1
## 4525 85 38 1 1
## 4526 86 38 1 1
## 4527 87 38 1 1
## 4528 88 38 1 1
## 4529 89 38 1 1
## 4530 90 38 1 1
## 4531 91 38 1 1
## 4532 92 38 1 1
## 4533 93 38 1 1
## 4534 94 38 1 1
## 4535 95 38 1 1
## 4536 96 38 1 1
## 4537 97 38 1 1
## 4538 98 38 1 1
## 4539 99 38 1 1
## 4540 100 38 1 1
## 4541 101 38 1 1
## 4542 102 38 1 1
## 4543 103 38 1 1
## 4544 104 38 1 1
## 4545 105 38 1 1
## 4546 106 38 1 1
## 4547 107 38 1 1
## 4548 108 38 1 1
## 4549 109 38 1 1
## 4550 110 38 1 1
## 4551 111 38 1 1
## 4552 112 38 1 1
## 4553 113 38 1 1
## 4554 114 38 1 1
## 4555 115 38 1 1
## 4556 116 38 1 1
## 4557 117 38 1 1
## 4558 118 38 1 1
## 4559 119 38 1 1
## 4560 120 38 1 1
## 4561 1 39 1 1
## 4562 2 39 1 1
## 4563 3 39 1 1
## 4564 4 39 1 1
## 4565 5 39 1 1
## 4566 6 39 1 1
## 4567 7 39 1 1
## 4568 8 39 1 1
## 4569 9 39 1 1
## 4570 10 39 1 1
## 4571 11 39 1 1
## 4572 12 39 1 1
## 4573 13 39 1 1
## 4574 14 39 1 1
## 4575 15 39 1 1
## 4576 16 39 1 1
## 4577 17 39 1 1
## 4578 18 39 1 1
## 4579 19 39 1 1
## 4580 20 39 1 1
## 4581 21 39 1 1
## 4582 22 39 1 1
## 4583 23 39 1 1
## 4584 24 39 1 1
## 4585 25 39 1 1
## 4586 26 39 1 1
## 4587 27 39 1 1
## 4588 28 39 1 1
## 4589 29 39 1 1
## 4590 30 39 1 1
## 4591 31 39 1 1
## 4592 32 39 1 1
## 4593 33 39 1 1
## 4594 34 39 1 1
## 4595 35 39 1 1
## 4596 36 39 1 1
## 4597 37 39 1 1
## 4598 38 39 1 1
## 4599 39 39 1 1
## 4600 40 39 1 1
## 4601 41 39 1 1
## 4602 42 39 1 1
## 4603 43 39 1 1
## 4604 44 39 1 1
## 4605 45 39 1 1
## 4606 46 39 1 1
## 4607 47 39 1 1
## 4608 48 39 1 1
## 4609 49 39 1 1
## 4610 50 39 1 1
## 4611 51 39 1 1
## 4612 52 39 1 1
## 4613 53 39 1 1
## 4614 54 39 1 1
## 4615 55 39 1 1
## 4616 56 39 1 1
## 4617 57 39 1 1
## 4618 58 39 1 1
## 4619 59 39 1 1
## 4620 60 39 1 1
## 4621 61 39 1 1
## 4622 62 39 1 1
## 4623 63 39 1 1
## 4624 64 39 1 1
## 4625 65 39 1 1
## 4626 66 39 1 1
## 4627 67 39 1 1
## 4628 68 39 1 1
## 4629 69 39 1 1
## 4630 70 39 1 1
## 4631 71 39 1 1
## 4632 72 39 1 1
## 4633 73 39 1 1
## 4634 74 39 1 1
## 4635 75 39 1 1
## 4636 76 39 1 1
## 4637 77 39 1 1
## 4638 78 39 1 1
## 4639 79 39 1 1
## 4640 80 39 1 1
## 4641 81 39 1 1
## 4642 82 39 1 1
## 4643 83 39 1 1
## 4644 84 39 1 1
## 4645 85 39 1 1
## 4646 86 39 1 1
## 4647 87 39 1 1
## 4648 88 39 1 1
## 4649 89 39 1 1
## 4650 90 39 1 1
## 4651 91 39 1 1
## 4652 92 39 1 1
## 4653 93 39 1 1
## 4654 94 39 1 1
## 4655 95 39 1 1
## 4656 96 39 1 1
## 4657 97 39 1 1
## 4658 98 39 1 1
## 4659 99 39 1 1
## 4660 100 39 1 1
## 4661 101 39 1 1
## 4662 102 39 1 1
## 4663 103 39 1 1
## 4664 104 39 1 1
## 4665 105 39 1 1
## 4666 106 39 1 1
## 4667 107 39 1 1
## 4668 108 39 1 1
## 4669 109 39 1 1
## 4670 110 39 1 1
## 4671 111 39 1 1
## 4672 112 39 1 1
## 4673 113 39 1 1
## 4674 114 39 1 1
## 4675 115 39 1 1
## 4676 116 39 1 1
## 4677 117 39 1 1
## 4678 118 39 1 1
## 4679 119 39 1 1
## 4680 120 39 1 1
## 4681 1 40 1 1
## 4682 2 40 1 1
## 4683 3 40 1 1
## 4684 4 40 1 1
## 4685 5 40 1 1
## 4686 6 40 1 1
## 4687 7 40 1 1
## 4688 8 40 1 1
## 4689 9 40 1 1
## 4690 10 40 1 1
## 4691 11 40 1 1
## 4692 12 40 1 1
## 4693 13 40 1 1
## 4694 14 40 1 1
## 4695 15 40 1 1
## 4696 16 40 1 1
## 4697 17 40 1 1
## 4698 18 40 1 1
## 4699 19 40 1 1
## 4700 20 40 1 1
## 4701 21 40 1 1
## 4702 22 40 1 1
## 4703 23 40 1 1
## 4704 24 40 1 1
## 4705 25 40 1 1
## 4706 26 40 1 1
## 4707 27 40 1 1
## 4708 28 40 1 1
## 4709 29 40 1 1
## 4710 30 40 1 1
## 4711 31 40 1 1
## 4712 32 40 1 1
## 4713 33 40 1 1
## 4714 34 40 1 1
## 4715 35 40 1 1
## 4716 36 40 1 1
## 4717 37 40 1 1
## 4718 38 40 1 1
## 4719 39 40 1 1
## 4720 40 40 1 1
## 4721 41 40 1 1
## 4722 42 40 1 1
## 4723 43 40 1 1
## 4724 44 40 1 1
## 4725 45 40 1 1
## 4726 46 40 1 1
## 4727 47 40 1 1
## 4728 48 40 1 1
## 4729 49 40 1 1
## 4730 50 40 1 1
## 4731 51 40 1 1
## 4732 52 40 1 1
## 4733 53 40 1 1
## 4734 54 40 1 1
## 4735 55 40 1 1
## 4736 56 40 1 1
## 4737 57 40 1 1
## 4738 58 40 1 1
## 4739 59 40 1 1
## 4740 60 40 1 1
## 4741 61 40 1 1
## 4742 62 40 1 1
## 4743 63 40 1 1
## 4744 64 40 1 1
## 4745 65 40 1 1
## 4746 66 40 1 1
## 4747 67 40 1 1
## 4748 68 40 1 1
## 4749 69 40 1 1
## 4750 70 40 1 1
## 4751 71 40 1 1
## 4752 72 40 1 1
## 4753 73 40 1 1
## 4754 74 40 1 1
## 4755 75 40 1 1
## 4756 76 40 1 1
## 4757 77 40 1 1
## 4758 78 40 1 1
## 4759 79 40 1 1
## 4760 80 40 1 1
## 4761 81 40 1 1
## 4762 82 40 1 1
## 4763 83 40 1 1
## 4764 84 40 1 1
## 4765 85 40 1 1
## 4766 86 40 1 1
## 4767 87 40 1 1
## 4768 88 40 1 1
## 4769 89 40 1 1
## 4770 90 40 1 1
## 4771 91 40 1 1
## 4772 92 40 1 1
## 4773 93 40 1 1
## 4774 94 40 1 1
## 4775 95 40 1 1
## 4776 96 40 1 1
## 4777 97 40 1 1
## 4778 98 40 1 1
## 4779 99 40 1 1
## 4780 100 40 1 1
## 4781 101 40 1 1
## 4782 102 40 1 1
## 4783 103 40 1 1
## 4784 104 40 1 1
## 4785 105 40 1 1
## 4786 106 40 1 1
## 4787 107 40 1 1
## 4788 108 40 1 1
## 4789 109 40 1 1
## 4790 110 40 1 1
## 4791 111 40 1 1
## 4792 112 40 1 1
## 4793 113 40 1 1
## 4794 114 40 1 1
## 4795 115 40 1 1
## 4796 116 40 1 1
## 4797 117 40 1 1
## 4798 118 40 1 1
## 4799 119 40 1 1
## 4800 120 40 1 1
## 4801 1 41 1 1
## 4802 2 41 1 1
## 4803 3 41 1 1
## 4804 4 41 1 1
## 4805 5 41 1 1
## 4806 6 41 1 1
## 4807 7 41 1 1
## 4808 8 41 1 1
## 4809 9 41 1 1
## 4810 10 41 1 1
## 4811 11 41 1 1
## 4812 12 41 1 1
## 4813 13 41 1 1
## 4814 14 41 1 1
## 4815 15 41 1 1
## 4816 16 41 1 1
## 4817 17 41 1 1
## 4818 18 41 1 1
## 4819 19 41 1 1
## 4820 20 41 1 1
## 4821 21 41 1 1
## 4822 22 41 1 1
## 4823 23 41 1 1
## 4824 24 41 1 1
## 4825 25 41 1 1
## 4826 26 41 1 1
## 4827 27 41 1 1
## 4828 28 41 1 1
## 4829 29 41 1 1
## 4830 30 41 1 1
## 4831 31 41 1 1
## 4832 32 41 1 1
## 4833 33 41 1 1
## 4834 34 41 1 1
## 4835 35 41 1 1
## 4836 36 41 1 1
## 4837 37 41 1 1
## 4838 38 41 1 1
## 4839 39 41 1 1
## 4840 40 41 1 1
## 4841 41 41 1 1
## 4842 42 41 1 1
## 4843 43 41 1 1
## 4844 44 41 1 1
## 4845 45 41 1 1
## 4846 46 41 1 1
## 4847 47 41 1 1
## 4848 48 41 1 1
## 4849 49 41 1 1
## 4850 50 41 1 1
## 4851 51 41 1 1
## 4852 52 41 1 1
## 4853 53 41 1 1
## 4854 54 41 1 1
## 4855 55 41 1 1
## 4856 56 41 1 1
## 4857 57 41 1 1
## 4858 58 41 1 1
## 4859 59 41 1 1
## 4860 60 41 1 1
## 4861 61 41 1 1
## 4862 62 41 1 1
## 4863 63 41 1 1
## 4864 64 41 1 1
## 4865 65 41 1 1
## 4866 66 41 1 1
## 4867 67 41 1 1
## 4868 68 41 1 1
## 4869 69 41 1 1
## 4870 70 41 1 1
## 4871 71 41 1 1
## 4872 72 41 1 1
## 4873 73 41 1 1
## 4874 74 41 1 1
## 4875 75 41 1 1
## 4876 76 41 1 1
## 4877 77 41 1 1
## 4878 78 41 1 1
## 4879 79 41 1 1
## 4880 80 41 1 1
## 4881 81 41 1 1
## 4882 82 41 1 1
## 4883 83 41 1 1
## 4884 84 41 1 1
## 4885 85 41 1 1
## 4886 86 41 1 1
## 4887 87 41 1 1
## 4888 88 41 1 1
## 4889 89 41 1 1
## 4890 90 41 1 1
## 4891 91 41 1 1
## 4892 92 41 1 1
## 4893 93 41 1 1
## 4894 94 41 1 1
## 4895 95 41 1 1
## 4896 96 41 1 1
## 4897 97 41 1 1
## 4898 98 41 1 1
## 4899 99 41 1 1
## 4900 100 41 1 1
## 4901 101 41 1 1
## 4902 102 41 1 1
## 4903 103 41 1 1
## 4904 104 41 1 1
## 4905 105 41 1 1
## 4906 106 41 1 1
## 4907 107 41 1 1
## 4908 108 41 1 1
## 4909 109 41 1 1
## 4910 110 41 1 1
## 4911 111 41 1 1
## 4912 112 41 1 1
## 4913 113 41 1 1
## 4914 114 41 1 1
## 4915 115 41 1 1
## 4916 116 41 1 1
## 4917 117 41 1 1
## 4918 118 41 1 1
## 4919 119 41 1 1
## 4920 120 41 1 1
## 4921 1 42 1 1
## 4922 2 42 1 1
## 4923 3 42 1 1
## 4924 4 42 1 1
## 4925 5 42 1 1
## 4926 6 42 1 1
## 4927 7 42 1 1
## 4928 8 42 1 1
## 4929 9 42 1 1
## 4930 10 42 1 1
## 4931 11 42 1 1
## 4932 12 42 1 1
## 4933 13 42 1 1
## 4934 14 42 1 1
## 4935 15 42 1 1
## 4936 16 42 1 1
## 4937 17 42 1 1
## 4938 18 42 1 1
## 4939 19 42 1 1
## 4940 20 42 1 1
## 4941 21 42 1 1
## 4942 22 42 1 1
## 4943 23 42 1 1
## 4944 24 42 1 1
## 4945 25 42 1 1
## 4946 26 42 1 1
## 4947 27 42 1 1
## 4948 28 42 1 1
## 4949 29 42 1 1
## 4950 30 42 1 1
## 4951 31 42 1 1
## 4952 32 42 1 1
## 4953 33 42 1 1
## 4954 34 42 1 1
## 4955 35 42 1 1
## 4956 36 42 1 1
## 4957 37 42 1 1
## 4958 38 42 1 1
## 4959 39 42 1 1
## 4960 40 42 1 1
## 4961 41 42 1 1
## 4962 42 42 1 1
## 4963 43 42 1 1
## 4964 44 42 1 1
## 4965 45 42 1 1
## 4966 46 42 1 1
## 4967 47 42 1 1
## 4968 48 42 1 1
## 4969 49 42 1 1
## 4970 50 42 1 1
## 4971 51 42 1 1
## 4972 52 42 1 1
## 4973 53 42 1 1
## 4974 54 42 1 1
## 4975 55 42 1 1
## 4976 56 42 1 1
## 4977 57 42 1 1
## 4978 58 42 1 1
## 4979 59 42 1 1
## 4980 60 42 1 1
## 4981 61 42 1 1
## 4982 62 42 1 1
## 4983 63 42 1 1
## 4984 64 42 1 1
## 4985 65 42 1 1
## 4986 66 42 1 1
## 4987 67 42 1 1
## 4988 68 42 1 1
## 4989 69 42 1 1
## 4990 70 42 1 1
## 4991 71 42 1 1
## 4992 72 42 1 1
## 4993 73 42 1 1
## 4994 74 42 1 1
## 4995 75 42 1 1
## 4996 76 42 1 1
## 4997 77 42 1 1
## 4998 78 42 1 1
## 4999 79 42 1 1
## 5000 80 42 1 1
## 5001 81 42 1 1
## 5002 82 42 1 1
## 5003 83 42 1 1
## 5004 84 42 1 1
## 5005 85 42 1 1
## 5006 86 42 1 1
## 5007 87 42 1 1
## 5008 88 42 1 1
## 5009 89 42 1 1
## 5010 90 42 1 1
## 5011 91 42 1 1
## 5012 92 42 1 1
## 5013 93 42 1 1
## 5014 94 42 1 1
## 5015 95 42 1 1
## 5016 96 42 1 1
## 5017 97 42 1 1
## 5018 98 42 1 1
## 5019 99 42 1 1
## 5020 100 42 1 1
## 5021 101 42 1 1
## 5022 102 42 1 1
## 5023 103 42 1 1
## 5024 104 42 1 1
## 5025 105 42 1 1
## 5026 106 42 1 1
## 5027 107 42 1 1
## 5028 108 42 1 1
## 5029 109 42 1 1
## 5030 110 42 1 1
## 5031 111 42 1 1
## 5032 112 42 1 1
## 5033 113 42 1 1
## 5034 114 42 1 1
## 5035 115 42 1 1
## 5036 116 42 1 1
## 5037 117 42 1 1
## 5038 118 42 1 1
## 5039 119 42 1 1
## 5040 120 42 1 1
## 5041 1 43 1 1
## 5042 2 43 1 1
## 5043 3 43 1 1
## 5044 4 43 1 1
## 5045 5 43 1 1
## 5046 6 43 1 1
## 5047 7 43 1 1
## 5048 8 43 1 1
## 5049 9 43 1 1
## 5050 10 43 1 1
## 5051 11 43 1 1
## 5052 12 43 1 1
## 5053 13 43 1 1
## 5054 14 43 1 1
## 5055 15 43 1 1
## 5056 16 43 1 1
## 5057 17 43 1 1
## 5058 18 43 1 1
## 5059 19 43 1 1
## 5060 20 43 1 1
## 5061 21 43 1 1
## 5062 22 43 1 1
## 5063 23 43 1 1
## 5064 24 43 1 1
## 5065 25 43 1 1
## 5066 26 43 1 1
## 5067 27 43 1 1
## 5068 28 43 1 1
## 5069 29 43 1 1
## 5070 30 43 1 1
## 5071 31 43 1 1
## 5072 32 43 1 1
## 5073 33 43 1 1
## 5074 34 43 1 1
## 5075 35 43 1 1
## 5076 36 43 1 1
## 5077 37 43 1 1
## 5078 38 43 1 1
## 5079 39 43 1 1
## 5080 40 43 1 1
## 5081 41 43 1 1
## 5082 42 43 1 1
## 5083 43 43 1 1
## 5084 44 43 1 1
## 5085 45 43 1 1
## 5086 46 43 1 1
## 5087 47 43 1 1
## 5088 48 43 1 1
## 5089 49 43 1 1
## 5090 50 43 1 1
## 5091 51 43 1 1
## 5092 52 43 1 1
## 5093 53 43 1 1
## 5094 54 43 1 1
## 5095 55 43 1 1
## 5096 56 43 1 1
## 5097 57 43 1 1
## 5098 58 43 1 1
## 5099 59 43 1 1
## 5100 60 43 1 1
## 5101 61 43 1 1
## 5102 62 43 1 1
## 5103 63 43 1 1
## 5104 64 43 1 1
## 5105 65 43 1 1
## 5106 66 43 1 1
## 5107 67 43 1 1
## 5108 68 43 1 1
## 5109 69 43 1 1
## 5110 70 43 1 1
## 5111 71 43 1 1
## 5112 72 43 1 1
## 5113 73 43 1 1
## 5114 74 43 1 1
## 5115 75 43 1 1
## 5116 76 43 1 1
## 5117 77 43 1 1
## 5118 78 43 1 1
## 5119 79 43 1 1
## 5120 80 43 1 1
## 5121 81 43 1 1
## 5122 82 43 1 1
## 5123 83 43 1 1
## 5124 84 43 1 1
## 5125 85 43 1 1
## 5126 86 43 1 1
## 5127 87 43 1 1
## 5128 88 43 1 1
## 5129 89 43 1 1
## 5130 90 43 1 1
## 5131 91 43 1 1
## 5132 92 43 1 1
## 5133 93 43 1 1
## 5134 94 43 1 1
## 5135 95 43 1 1
## 5136 96 43 1 1
## 5137 97 43 1 1
## 5138 98 43 1 1
## 5139 99 43 1 1
## 5140 100 43 1 1
## 5141 101 43 1 1
## 5142 102 43 1 1
## 5143 103 43 1 1
## 5144 104 43 1 1
## 5145 105 43 1 1
## 5146 106 43 1 1
## 5147 107 43 1 1
## 5148 108 43 1 1
## 5149 109 43 1 1
## 5150 110 43 1 1
## 5151 111 43 1 1
## 5152 112 43 1 1
## 5153 113 43 1 1
## 5154 114 43 1 1
## 5155 115 43 1 1
## 5156 116 43 1 1
## 5157 117 43 1 1
## 5158 118 43 1 1
## 5159 119 43 1 1
## 5160 120 43 1 1
## 5161 1 44 1 1
## 5162 2 44 1 1
## 5163 3 44 1 1
## 5164 4 44 1 1
## 5165 5 44 1 1
## 5166 6 44 1 1
## 5167 7 44 1 1
## 5168 8 44 1 1
## 5169 9 44 1 1
## 5170 10 44 1 1
## 5171 11 44 1 1
## 5172 12 44 1 1
## 5173 13 44 1 1
## 5174 14 44 1 1
## 5175 15 44 1 1
## 5176 16 44 1 1
## 5177 17 44 1 1
## 5178 18 44 1 1
## 5179 19 44 1 1
## 5180 20 44 1 1
## 5181 21 44 1 1
## 5182 22 44 1 1
## 5183 23 44 1 1
## 5184 24 44 1 1
## 5185 25 44 1 1
## 5186 26 44 1 1
## 5187 27 44 1 1
## 5188 28 44 1 1
## 5189 29 44 1 1
## 5190 30 44 1 1
## 5191 31 44 1 1
## 5192 32 44 1 1
## 5193 33 44 1 1
## 5194 34 44 1 1
## 5195 35 44 1 1
## 5196 36 44 1 1
## 5197 37 44 1 1
## 5198 38 44 1 1
## 5199 39 44 1 1
## 5200 40 44 1 1
## 5201 41 44 1 1
## 5202 42 44 1 1
## 5203 43 44 1 1
## 5204 44 44 1 1
## 5205 45 44 1 1
## 5206 46 44 1 1
## 5207 47 44 1 1
## 5208 48 44 1 1
## 5209 49 44 1 1
## 5210 50 44 1 1
## 5211 51 44 1 1
## 5212 52 44 1 1
## 5213 53 44 1 1
## 5214 54 44 1 1
## 5215 55 44 1 1
## 5216 56 44 1 1
## 5217 57 44 1 1
## 5218 58 44 1 1
## 5219 59 44 1 1
## 5220 60 44 1 1
## 5221 61 44 1 1
## 5222 62 44 1 1
## 5223 63 44 1 1
## 5224 64 44 1 1
## 5225 65 44 1 1
## 5226 66 44 1 1
## 5227 67 44 1 1
## 5228 68 44 1 1
## 5229 69 44 1 1
## 5230 70 44 1 1
## 5231 71 44 1 1
## 5232 72 44 1 1
## 5233 73 44 1 1
## 5234 74 44 1 1
## 5235 75 44 1 1
## 5236 76 44 1 1
## 5237 77 44 1 1
## 5238 78 44 1 1
## 5239 79 44 1 1
## 5240 80 44 1 1
## 5241 81 44 1 1
## 5242 82 44 1 1
## 5243 83 44 1 1
## 5244 84 44 1 1
## 5245 85 44 1 1
## 5246 86 44 1 1
## 5247 87 44 1 1
## 5248 88 44 1 1
## 5249 89 44 1 1
## 5250 90 44 1 1
## 5251 91 44 1 1
## 5252 92 44 1 1
## 5253 93 44 1 1
## 5254 94 44 1 1
## 5255 95 44 1 1
## 5256 96 44 1 1
## 5257 97 44 1 1
## 5258 98 44 1 1
## 5259 99 44 1 1
## 5260 100 44 1 1
## 5261 101 44 1 1
## 5262 102 44 1 1
## 5263 103 44 1 1
## 5264 104 44 1 1
## 5265 105 44 1 1
## 5266 106 44 1 1
## 5267 107 44 1 1
## 5268 108 44 1 1
## 5269 109 44 1 1
## 5270 110 44 1 1
## 5271 111 44 1 1
## 5272 112 44 1 1
## 5273 113 44 1 1
## 5274 114 44 1 1
## 5275 115 44 1 1
## 5276 116 44 1 1
## 5277 117 44 1 1
## 5278 118 44 1 1
## 5279 119 44 1 1
## 5280 120 44 1 1
## 5281 1 45 1 1
## 5282 2 45 1 1
## 5283 3 45 1 1
## 5284 4 45 1 1
## 5285 5 45 1 1
## 5286 6 45 1 1
## 5287 7 45 1 1
## 5288 8 45 1 1
## 5289 9 45 1 1
## 5290 10 45 1 1
## 5291 11 45 1 1
## 5292 12 45 1 1
## 5293 13 45 1 1
## 5294 14 45 1 1
## 5295 15 45 1 1
## 5296 16 45 1 1
## 5297 17 45 1 1
## 5298 18 45 1 1
## 5299 19 45 1 1
## 5300 20 45 1 1
## 5301 21 45 1 1
## 5302 22 45 1 1
## 5303 23 45 1 1
## 5304 24 45 1 1
## 5305 25 45 1 1
## 5306 26 45 1 1
## 5307 27 45 1 1
## 5308 28 45 1 1
## 5309 29 45 1 1
## 5310 30 45 1 1
## 5311 31 45 1 1
## 5312 32 45 1 1
## 5313 33 45 1 1
## 5314 34 45 1 1
## 5315 35 45 1 1
## 5316 36 45 1 1
## 5317 37 45 1 1
## 5318 38 45 1 1
## 5319 39 45 1 1
## 5320 40 45 1 1
## 5321 41 45 1 1
## 5322 42 45 1 1
## 5323 43 45 1 1
## 5324 44 45 1 1
## 5325 45 45 1 1
## 5326 46 45 1 1
## 5327 47 45 1 1
## 5328 48 45 1 1
## 5329 49 45 1 1
## 5330 50 45 1 1
## 5331 51 45 1 1
## 5332 52 45 1 1
## 5333 53 45 1 1
## 5334 54 45 1 1
## 5335 55 45 1 1
## 5336 56 45 1 1
## 5337 57 45 1 1
## 5338 58 45 1 1
## 5339 59 45 1 1
## 5340 60 45 1 1
## 5341 61 45 1 1
## 5342 62 45 1 1
## 5343 63 45 1 1
## 5344 64 45 1 1
## 5345 65 45 1 1
## 5346 66 45 1 1
## 5347 67 45 1 1
## 5348 68 45 1 1
## 5349 69 45 1 1
## 5350 70 45 1 1
## 5351 71 45 1 1
## 5352 72 45 1 1
## 5353 73 45 1 1
## 5354 74 45 1 1
## 5355 75 45 1 1
## 5356 76 45 1 1
## 5357 77 45 1 1
## 5358 78 45 1 1
## 5359 79 45 1 1
## 5360 80 45 1 1
## 5361 81 45 1 1
## 5362 82 45 1 1
## 5363 83 45 1 1
## 5364 84 45 1 1
## 5365 85 45 1 1
## 5366 86 45 1 1
## 5367 87 45 1 1
## 5368 88 45 1 1
## 5369 89 45 1 1
## 5370 90 45 1 1
## 5371 91 45 1 1
## 5372 92 45 1 1
## 5373 93 45 1 1
## 5374 94 45 1 1
## 5375 95 45 1 1
## 5376 96 45 1 1
## 5377 97 45 1 1
## 5378 98 45 1 1
## 5379 99 45 1 1
## 5380 100 45 1 1
## 5381 101 45 1 1
## 5382 102 45 1 1
## 5383 103 45 1 1
## 5384 104 45 1 1
## 5385 105 45 1 1
## 5386 106 45 1 1
## 5387 107 45 1 1
## 5388 108 45 1 1
## 5389 109 45 1 1
## 5390 110 45 1 1
## 5391 111 45 1 1
## 5392 112 45 1 1
## 5393 113 45 1 1
## 5394 114 45 1 1
## 5395 115 45 1 1
## 5396 116 45 1 1
## 5397 117 45 1 1
## 5398 118 45 1 1
## 5399 119 45 1 1
## 5400 120 45 1 1
## 5401 1 46 1 1
## 5402 2 46 1 1
## 5403 3 46 1 1
## 5404 4 46 1 1
## 5405 5 46 1 1
## 5406 6 46 1 1
## 5407 7 46 1 1
## 5408 8 46 1 1
## 5409 9 46 1 1
## 5410 10 46 1 1
## 5411 11 46 1 1
## 5412 12 46 1 1
## 5413 13 46 1 1
## 5414 14 46 1 1
## 5415 15 46 1 1
## 5416 16 46 1 1
## 5417 17 46 1 1
## 5418 18 46 1 1
## 5419 19 46 1 1
## 5420 20 46 1 1
## 5421 21 46 1 1
## 5422 22 46 1 1
## 5423 23 46 1 1
## 5424 24 46 1 1
## 5425 25 46 1 1
## 5426 26 46 1 1
## 5427 27 46 1 1
## 5428 28 46 1 1
## 5429 29 46 1 1
## 5430 30 46 1 1
## 5431 31 46 1 1
## 5432 32 46 1 1
## 5433 33 46 1 1
## 5434 34 46 1 1
## 5435 35 46 1 1
## 5436 36 46 1 1
## 5437 37 46 1 1
## 5438 38 46 1 1
## 5439 39 46 1 1
## 5440 40 46 1 1
## 5441 41 46 1 1
## 5442 42 46 1 1
## 5443 43 46 1 1
## 5444 44 46 1 1
## 5445 45 46 1 1
## 5446 46 46 1 1
## 5447 47 46 1 1
## 5448 48 46 1 1
## 5449 49 46 1 1
## 5450 50 46 1 1
## 5451 51 46 1 1
## 5452 52 46 1 1
## 5453 53 46 1 1
## 5454 54 46 1 1
## 5455 55 46 1 1
## 5456 56 46 1 1
## 5457 57 46 1 1
## 5458 58 46 1 1
## 5459 59 46 1 1
## 5460 60 46 1 1
## 5461 61 46 1 1
## 5462 62 46 1 1
## 5463 63 46 1 1
## 5464 64 46 1 1
## 5465 65 46 1 1
## 5466 66 46 1 1
## 5467 67 46 1 1
## 5468 68 46 1 1
## 5469 69 46 1 1
## 5470 70 46 1 1
## 5471 71 46 1 1
## 5472 72 46 1 1
## 5473 73 46 1 1
## 5474 74 46 1 1
## 5475 75 46 1 1
## 5476 76 46 1 1
## 5477 77 46 1 1
## 5478 78 46 1 1
## 5479 79 46 1 1
## 5480 80 46 1 1
## 5481 81 46 1 1
## 5482 82 46 1 1
## 5483 83 46 1 1
## 5484 84 46 1 1
## 5485 85 46 1 1
## 5486 86 46 1 1
## 5487 87 46 1 1
## 5488 88 46 1 1
## 5489 89 46 1 1
## 5490 90 46 1 1
## 5491 91 46 1 1
## 5492 92 46 1 1
## 5493 93 46 1 1
## 5494 94 46 1 1
## 5495 95 46 1 1
## 5496 96 46 1 1
## 5497 97 46 1 1
## 5498 98 46 1 1
## 5499 99 46 1 1
## 5500 100 46 1 1
## 5501 101 46 1 1
## 5502 102 46 1 1
## 5503 103 46 1 1
## 5504 104 46 1 1
## 5505 105 46 1 1
## 5506 106 46 1 1
## 5507 107 46 1 1
## 5508 108 46 1 1
## 5509 109 46 1 1
## 5510 110 46 1 1
## 5511 111 46 1 1
## 5512 112 46 1 1
## 5513 113 46 1 1
## 5514 114 46 1 1
## 5515 115 46 1 1
## 5516 116 46 1 1
## 5517 117 46 1 1
## 5518 118 46 1 1
## 5519 119 46 1 1
## 5520 120 46 1 1
## 5521 1 47 1 1
## 5522 2 47 1 1
## 5523 3 47 1 1
## 5524 4 47 1 1
## 5525 5 47 1 1
## 5526 6 47 1 1
## 5527 7 47 1 1
## 5528 8 47 1 1
## 5529 9 47 1 1
## 5530 10 47 1 1
## 5531 11 47 1 1
## 5532 12 47 1 1
## 5533 13 47 1 1
## 5534 14 47 1 1
## 5535 15 47 1 1
## 5536 16 47 1 1
## 5537 17 47 1 1
## 5538 18 47 1 1
## 5539 19 47 1 1
## 5540 20 47 1 1
## 5541 21 47 1 1
## 5542 22 47 1 1
## 5543 23 47 1 1
## 5544 24 47 1 1
## 5545 25 47 1 1
## 5546 26 47 1 1
## 5547 27 47 1 1
## 5548 28 47 1 1
## 5549 29 47 1 1
## 5550 30 47 1 1
## 5551 31 47 1 1
## 5552 32 47 1 1
## 5553 33 47 1 1
## 5554 34 47 1 1
## 5555 35 47 1 1
## 5556 36 47 1 1
## 5557 37 47 1 1
## 5558 38 47 1 1
## 5559 39 47 1 1
## 5560 40 47 1 1
## 5561 41 47 1 1
## 5562 42 47 1 1
## 5563 43 47 1 1
## 5564 44 47 1 1
## 5565 45 47 1 1
## 5566 46 47 1 1
## 5567 47 47 1 1
## 5568 48 47 1 1
## 5569 49 47 1 1
## 5570 50 47 1 1
## 5571 51 47 1 1
## 5572 52 47 1 1
## 5573 53 47 1 1
## 5574 54 47 1 1
## 5575 55 47 1 1
## 5576 56 47 1 1
## 5577 57 47 1 1
## 5578 58 47 1 1
## 5579 59 47 1 1
## 5580 60 47 1 1
## 5581 61 47 1 1
## 5582 62 47 1 1
## 5583 63 47 1 1
## 5584 64 47 1 1
## 5585 65 47 1 1
## 5586 66 47 1 1
## 5587 67 47 1 1
## 5588 68 47 1 1
## 5589 69 47 1 1
## 5590 70 47 1 1
## 5591 71 47 1 1
## 5592 72 47 1 1
## 5593 73 47 1 1
## 5594 74 47 1 1
## 5595 75 47 1 1
## 5596 76 47 1 1
## 5597 77 47 1 1
## 5598 78 47 1 1
## 5599 79 47 1 1
## 5600 80 47 1 1
## 5601 81 47 1 1
## 5602 82 47 1 1
## 5603 83 47 1 1
## 5604 84 47 1 1
## 5605 85 47 1 1
## 5606 86 47 1 1
## 5607 87 47 1 1
## 5608 88 47 1 1
## 5609 89 47 1 1
## 5610 90 47 1 1
## 5611 91 47 1 1
## 5612 92 47 1 1
## 5613 93 47 1 1
## 5614 94 47 1 1
## 5615 95 47 1 1
## 5616 96 47 1 1
## 5617 97 47 1 1
## 5618 98 47 1 1
## 5619 99 47 1 1
## 5620 100 47 1 1
## 5621 101 47 1 1
## 5622 102 47 1 1
## 5623 103 47 1 1
## 5624 104 47 1 1
## 5625 105 47 1 1
## 5626 106 47 1 1
## 5627 107 47 1 1
## 5628 108 47 1 1
## 5629 109 47 1 1
## 5630 110 47 1 1
## 5631 111 47 1 1
## 5632 112 47 1 1
## 5633 113 47 1 1
## 5634 114 47 1 1
## 5635 115 47 1 1
## 5636 116 47 1 1
## 5637 117 47 1 1
## 5638 118 47 1 1
## 5639 119 47 1 1
## 5640 120 47 1 1
## 5641 1 48 1 1
## 5642 2 48 1 1
## 5643 3 48 1 1
## 5644 4 48 1 1
## 5645 5 48 1 1
## 5646 6 48 1 1
## 5647 7 48 1 1
## 5648 8 48 1 1
## 5649 9 48 1 1
## 5650 10 48 1 1
## 5651 11 48 1 1
## 5652 12 48 1 1
## 5653 13 48 1 1
## 5654 14 48 1 1
## 5655 15 48 1 1
## 5656 16 48 1 1
## 5657 17 48 1 1
## 5658 18 48 1 1
## 5659 19 48 1 1
## 5660 20 48 1 1
## 5661 21 48 1 1
## 5662 22 48 1 1
## 5663 23 48 1 1
## 5664 24 48 1 1
## 5665 25 48 1 1
## 5666 26 48 1 1
## 5667 27 48 1 1
## 5668 28 48 1 1
## 5669 29 48 1 1
## 5670 30 48 1 1
## 5671 31 48 1 1
## 5672 32 48 1 1
## 5673 33 48 1 1
## 5674 34 48 1 1
## 5675 35 48 1 1
## 5676 36 48 1 1
## 5677 37 48 1 1
## 5678 38 48 1 1
## 5679 39 48 1 1
## 5680 40 48 1 1
## 5681 41 48 1 1
## 5682 42 48 1 1
## 5683 43 48 1 1
## 5684 44 48 1 1
## 5685 45 48 1 1
## 5686 46 48 1 1
## 5687 47 48 1 1
## 5688 48 48 1 1
## 5689 49 48 1 1
## 5690 50 48 1 1
## 5691 51 48 1 1
## 5692 52 48 1 1
## 5693 53 48 1 1
## 5694 54 48 1 1
## 5695 55 48 1 1
## 5696 56 48 1 1
## 5697 57 48 1 1
## 5698 58 48 1 1
## 5699 59 48 1 1
## 5700 60 48 1 1
## 5701 61 48 1 1
## 5702 62 48 1 1
## 5703 63 48 1 1
## 5704 64 48 1 1
## 5705 65 48 1 1
## 5706 66 48 1 1
## 5707 67 48 1 1
## 5708 68 48 1 1
## 5709 69 48 1 1
## 5710 70 48 1 1
## 5711 71 48 1 1
## 5712 72 48 1 1
## 5713 73 48 1 1
## 5714 74 48 1 1
## 5715 75 48 1 1
## 5716 76 48 1 1
## 5717 77 48 1 1
## 5718 78 48 1 1
## 5719 79 48 1 1
## 5720 80 48 1 1
## 5721 81 48 1 1
## 5722 82 48 1 1
## 5723 83 48 1 1
## 5724 84 48 1 1
## 5725 85 48 1 1
## 5726 86 48 1 1
## 5727 87 48 1 1
## 5728 88 48 1 1
## 5729 89 48 1 1
## 5730 90 48 1 1
## 5731 91 48 1 1
## 5732 92 48 1 1
## 5733 93 48 1 1
## 5734 94 48 1 1
## 5735 95 48 1 1
## 5736 96 48 1 1
## 5737 97 48 1 1
## 5738 98 48 1 1
## 5739 99 48 1 1
## 5740 100 48 1 1
## 5741 101 48 1 1
## 5742 102 48 1 1
## 5743 103 48 1 1
## 5744 104 48 1 1
## 5745 105 48 1 1
## 5746 106 48 1 1
## 5747 107 48 1 1
## 5748 108 48 1 1
## 5749 109 48 1 1
## 5750 110 48 1 1
## 5751 111 48 1 1
## 5752 112 48 1 1
## 5753 113 48 1 1
## 5754 114 48 1 1
## 5755 115 48 1 1
## 5756 116 48 1 1
## 5757 117 48 1 1
## 5758 118 48 1 1
## 5759 119 48 1 1
## 5760 120 48 1 1
## 5761 1 49 1 1
## 5762 2 49 1 1
## 5763 3 49 1 1
## 5764 4 49 1 1
## 5765 5 49 1 1
## 5766 6 49 1 1
## 5767 7 49 1 1
## 5768 8 49 1 1
## 5769 9 49 1 1
## 5770 10 49 1 1
## 5771 11 49 1 1
## 5772 12 49 1 1
## 5773 13 49 1 1
## 5774 14 49 1 1
## 5775 15 49 1 1
## 5776 16 49 1 1
## 5777 17 49 1 1
## 5778 18 49 1 1
## 5779 19 49 1 1
## 5780 20 49 1 1
## 5781 21 49 1 1
## 5782 22 49 1 1
## 5783 23 49 1 1
## 5784 24 49 1 1
## 5785 25 49 1 1
## 5786 26 49 1 1
## 5787 27 49 1 1
## 5788 28 49 1 1
## 5789 29 49 1 1
## 5790 30 49 1 1
## 5791 31 49 1 1
## 5792 32 49 1 1
## 5793 33 49 1 1
## 5794 34 49 1 1
## 5795 35 49 1 1
## 5796 36 49 1 1
## 5797 37 49 1 1
## 5798 38 49 1 1
## 5799 39 49 1 1
## 5800 40 49 1 1
## 5801 41 49 1 1
## 5802 42 49 1 1
## 5803 43 49 1 1
## 5804 44 49 1 1
## 5805 45 49 1 1
## 5806 46 49 1 1
## 5807 47 49 1 1
## 5808 48 49 1 1
## 5809 49 49 1 1
## 5810 50 49 1 1
## 5811 51 49 1 1
## 5812 52 49 1 1
## 5813 53 49 1 1
## 5814 54 49 1 1
## 5815 55 49 1 1
## 5816 56 49 1 1
## 5817 57 49 1 1
## 5818 58 49 1 1
## 5819 59 49 1 1
## 5820 60 49 1 1
## 5821 61 49 1 1
## 5822 62 49 1 1
## 5823 63 49 1 1
## 5824 64 49 1 1
## 5825 65 49 1 1
## 5826 66 49 1 1
## 5827 67 49 1 1
## 5828 68 49 1 1
## 5829 69 49 1 1
## 5830 70 49 1 1
## 5831 71 49 1 1
## 5832 72 49 1 1
## 5833 73 49 1 1
## 5834 74 49 1 1
## 5835 75 49 1 1
## 5836 76 49 1 1
## 5837 77 49 1 1
## 5838 78 49 1 1
## 5839 79 49 1 1
## 5840 80 49 1 1
## 5841 81 49 1 1
## 5842 82 49 1 1
## 5843 83 49 1 1
## 5844 84 49 1 1
## 5845 85 49 1 1
## 5846 86 49 1 1
## 5847 87 49 1 1
## 5848 88 49 1 1
## 5849 89 49 1 1
## 5850 90 49 1 1
## 5851 91 49 1 1
## 5852 92 49 1 1
## 5853 93 49 1 1
## 5854 94 49 1 1
## 5855 95 49 1 1
## 5856 96 49 1 1
## 5857 97 49 1 1
## 5858 98 49 1 1
## 5859 99 49 1 1
## 5860 100 49 1 1
## 5861 101 49 1 1
## 5862 102 49 1 1
## 5863 103 49 1 1
## 5864 104 49 1 1
## 5865 105 49 1 1
## 5866 106 49 1 1
## 5867 107 49 1 1
## 5868 108 49 1 1
## 5869 109 49 1 1
## 5870 110 49 1 1
## 5871 111 49 1 1
## 5872 112 49 1 1
## 5873 113 49 1 1
## 5874 114 49 1 1
## 5875 115 49 1 1
## 5876 116 49 1 1
## 5877 117 49 1 1
## 5878 118 49 1 1
## 5879 119 49 1 1
## 5880 120 49 1 1
## 5881 1 50 1 1
## 5882 2 50 1 1
## 5883 3 50 1 1
## 5884 4 50 1 1
## 5885 5 50 1 1
## 5886 6 50 1 1
## 5887 7 50 1 1
## 5888 8 50 1 1
## 5889 9 50 1 1
## 5890 10 50 1 1
## 5891 11 50 1 1
## 5892 12 50 1 1
## 5893 13 50 1 1
## 5894 14 50 1 1
## 5895 15 50 1 1
## 5896 16 50 1 1
## 5897 17 50 1 1
## 5898 18 50 1 1
## 5899 19 50 1 1
## 5900 20 50 1 1
## 5901 21 50 1 1
## 5902 22 50 1 1
## 5903 23 50 1 1
## 5904 24 50 1 1
## 5905 25 50 1 1
## 5906 26 50 1 1
## 5907 27 50 1 1
## 5908 28 50 1 1
## 5909 29 50 1 1
## 5910 30 50 1 1
## 5911 31 50 1 1
## 5912 32 50 1 1
## 5913 33 50 1 1
## 5914 34 50 1 1
## 5915 35 50 1 1
## 5916 36 50 1 1
## 5917 37 50 1 1
## 5918 38 50 1 1
## 5919 39 50 1 1
## 5920 40 50 1 1
## 5921 41 50 1 1
## 5922 42 50 1 1
## 5923 43 50 1 1
## 5924 44 50 1 1
## 5925 45 50 1 1
## 5926 46 50 1 1
## 5927 47 50 1 1
## 5928 48 50 1 1
## 5929 49 50 1 1
## 5930 50 50 1 1
## 5931 51 50 1 1
## 5932 52 50 1 1
## 5933 53 50 1 1
## 5934 54 50 1 1
## 5935 55 50 1 1
## 5936 56 50 1 1
## 5937 57 50 1 1
## 5938 58 50 1 1
## 5939 59 50 1 1
## 5940 60 50 1 1
## 5941 61 50 1 1
## 5942 62 50 1 1
## 5943 63 50 1 1
## 5944 64 50 1 1
## 5945 65 50 1 1
## 5946 66 50 1 1
## 5947 67 50 1 1
## 5948 68 50 1 1
## 5949 69 50 1 1
## 5950 70 50 1 1
## 5951 71 50 1 1
## 5952 72 50 1 1
## 5953 73 50 1 1
## 5954 74 50 1 1
## 5955 75 50 1 1
## 5956 76 50 1 1
## 5957 77 50 1 1
## 5958 78 50 1 1
## 5959 79 50 1 1
## 5960 80 50 1 1
## 5961 81 50 1 1
## 5962 82 50 1 1
## 5963 83 50 1 1
## 5964 84 50 1 1
## 5965 85 50 1 1
## 5966 86 50 1 1
## 5967 87 50 1 1
## 5968 88 50 1 1
## 5969 89 50 1 1
## 5970 90 50 1 1
## 5971 91 50 1 1
## 5972 92 50 1 1
## 5973 93 50 1 1
## 5974 94 50 1 1
## 5975 95 50 1 1
## 5976 96 50 1 1
## 5977 97 50 1 1
## 5978 98 50 1 1
## 5979 99 50 1 1
## 5980 100 50 1 1
## 5981 101 50 1 1
## 5982 102 50 1 1
## 5983 103 50 1 1
## 5984 104 50 1 1
## 5985 105 50 1 1
## 5986 106 50 1 1
## 5987 107 50 1 1
## 5988 108 50 1 1
## 5989 109 50 1 1
## 5990 110 50 1 1
## 5991 111 50 1 1
## 5992 112 50 1 1
## 5993 113 50 1 1
## 5994 114 50 1 1
## 5995 115 50 1 1
## 5996 116 50 1 1
## 5997 117 50 1 1
## 5998 118 50 1 1
## 5999 119 50 1 1
## 6000 120 50 1 1
## 6001 1 51 1 1
## 6002 2 51 1 1
## 6003 3 51 1 1
## 6004 4 51 1 1
## 6005 5 51 1 1
## 6006 6 51 1 1
## 6007 7 51 1 1
## 6008 8 51 1 1
## 6009 9 51 1 1
## 6010 10 51 1 1
## 6011 11 51 1 1
## 6012 12 51 1 1
## 6013 13 51 1 1
## 6014 14 51 1 1
## 6015 15 51 1 1
## 6016 16 51 1 1
## 6017 17 51 1 1
## 6018 18 51 1 1
## 6019 19 51 1 1
## 6020 20 51 1 1
## 6021 21 51 1 1
## 6022 22 51 1 1
## 6023 23 51 1 1
## 6024 24 51 1 1
## 6025 25 51 1 1
## 6026 26 51 1 1
## 6027 27 51 1 1
## 6028 28 51 1 1
## 6029 29 51 1 1
## 6030 30 51 1 1
## 6031 31 51 1 1
## 6032 32 51 1 1
## 6033 33 51 1 1
## 6034 34 51 1 1
## 6035 35 51 1 1
## 6036 36 51 1 1
## 6037 37 51 1 1
## 6038 38 51 1 1
## 6039 39 51 1 1
## 6040 40 51 1 1
## 6041 41 51 1 1
## 6042 42 51 1 1
## 6043 43 51 1 1
## 6044 44 51 1 1
## 6045 45 51 1 1
## 6046 46 51 1 1
## 6047 47 51 1 1
## 6048 48 51 1 1
## 6049 49 51 1 1
## 6050 50 51 1 1
## 6051 51 51 1 1
## 6052 52 51 1 1
## 6053 53 51 1 1
## 6054 54 51 1 1
## 6055 55 51 1 1
## 6056 56 51 1 1
## 6057 57 51 1 1
## 6058 58 51 1 1
## 6059 59 51 1 1
## 6060 60 51 1 1
## 6061 61 51 1 1
## 6062 62 51 1 1
## 6063 63 51 1 1
## 6064 64 51 1 1
## 6065 65 51 1 1
## 6066 66 51 1 1
## 6067 67 51 1 1
## 6068 68 51 1 1
## 6069 69 51 1 1
## 6070 70 51 1 1
## 6071 71 51 1 1
## 6072 72 51 1 1
## 6073 73 51 1 1
## 6074 74 51 1 1
## 6075 75 51 1 1
## 6076 76 51 1 1
## 6077 77 51 1 1
## 6078 78 51 1 1
## 6079 79 51 1 1
## 6080 80 51 1 1
## 6081 81 51 1 1
## 6082 82 51 1 1
## 6083 83 51 1 1
## 6084 84 51 1 1
## 6085 85 51 1 1
## 6086 86 51 1 1
## 6087 87 51 1 1
## 6088 88 51 1 1
## 6089 89 51 1 1
## 6090 90 51 1 1
## 6091 91 51 1 1
## 6092 92 51 1 1
## 6093 93 51 1 1
## 6094 94 51 1 1
## 6095 95 51 1 1
## 6096 96 51 1 1
## 6097 97 51 1 1
## 6098 98 51 1 1
## 6099 99 51 1 1
## 6100 100 51 1 1
## 6101 101 51 1 1
## 6102 102 51 1 1
## 6103 103 51 1 1
## 6104 104 51 1 1
## 6105 105 51 1 1
## 6106 106 51 1 1
## 6107 107 51 1 1
## 6108 108 51 1 1
## 6109 109 51 1 1
## 6110 110 51 1 1
## 6111 111 51 1 1
## 6112 112 51 1 1
## 6113 113 51 1 1
## 6114 114 51 1 1
## 6115 115 51 1 1
## 6116 116 51 1 1
## 6117 117 51 1 1
## 6118 118 51 1 1
## 6119 119 51 1 1
## 6120 120 51 1 1
## 6121 1 52 1 1
## 6122 2 52 1 1
## 6123 3 52 1 1
## 6124 4 52 1 1
## 6125 5 52 1 1
## 6126 6 52 1 1
## 6127 7 52 1 1
## 6128 8 52 1 1
## 6129 9 52 1 1
## 6130 10 52 1 1
## 6131 11 52 1 1
## 6132 12 52 1 1
## 6133 13 52 1 1
## 6134 14 52 1 1
## 6135 15 52 1 1
## 6136 16 52 1 1
## 6137 17 52 1 1
## 6138 18 52 1 1
## 6139 19 52 1 1
## 6140 20 52 1 1
## 6141 21 52 1 1
## 6142 22 52 1 1
## 6143 23 52 1 1
## 6144 24 52 1 1
## 6145 25 52 1 1
## 6146 26 52 1 1
## 6147 27 52 1 1
## 6148 28 52 1 1
## 6149 29 52 1 1
## 6150 30 52 1 1
## 6151 31 52 1 1
## 6152 32 52 1 1
## 6153 33 52 1 1
## 6154 34 52 1 1
## 6155 35 52 1 1
## 6156 36 52 1 1
## 6157 37 52 1 1
## 6158 38 52 1 1
## 6159 39 52 1 1
## 6160 40 52 1 1
## 6161 41 52 1 1
## 6162 42 52 1 1
## 6163 43 52 1 1
## 6164 44 52 1 1
## 6165 45 52 1 1
## 6166 46 52 1 1
## 6167 47 52 1 1
## 6168 48 52 1 1
## 6169 49 52 1 1
## 6170 50 52 1 1
## 6171 51 52 1 1
## 6172 52 52 1 1
## 6173 53 52 1 1
## 6174 54 52 1 1
## 6175 55 52 1 1
## 6176 56 52 1 1
## 6177 57 52 1 1
## 6178 58 52 1 1
## 6179 59 52 1 1
## 6180 60 52 1 1
## 6181 61 52 1 1
## 6182 62 52 1 1
## 6183 63 52 1 1
## 6184 64 52 1 1
## 6185 65 52 1 1
## 6186 66 52 1 1
## 6187 67 52 1 1
## 6188 68 52 1 1
## 6189 69 52 1 1
## 6190 70 52 1 1
## 6191 71 52 1 1
## 6192 72 52 1 1
## 6193 73 52 1 1
## 6194 74 52 1 1
## 6195 75 52 1 1
## 6196 76 52 1 1
## 6197 77 52 1 1
## 6198 78 52 1 1
## 6199 79 52 1 1
## 6200 80 52 1 1
## 6201 81 52 1 1
## 6202 82 52 1 1
## 6203 83 52 1 1
## 6204 84 52 1 1
## 6205 85 52 1 1
## 6206 86 52 1 1
## 6207 87 52 1 1
## 6208 88 52 1 1
## 6209 89 52 1 1
## 6210 90 52 1 1
## 6211 91 52 1 1
## 6212 92 52 1 1
## 6213 93 52 1 1
## 6214 94 52 1 1
## 6215 95 52 1 1
## 6216 96 52 1 1
## 6217 97 52 1 1
## 6218 98 52 1 1
## 6219 99 52 1 1
## 6220 100 52 1 1
## 6221 101 52 1 1
## 6222 102 52 1 1
## 6223 103 52 1 1
## 6224 104 52 1 1
## 6225 105 52 1 1
## 6226 106 52 1 1
## 6227 107 52 1 1
## 6228 108 52 1 1
## 6229 109 52 1 1
## 6230 110 52 1 1
## 6231 111 52 1 1
## 6232 112 52 1 1
## 6233 113 52 1 1
## 6234 114 52 1 1
## 6235 115 52 1 1
## 6236 116 52 1 1
## 6237 117 52 1 1
## 6238 118 52 1 1
## 6239 119 52 1 1
## 6240 120 52 1 1
## 6241 1 53 1 1
## 6242 2 53 1 1
## 6243 3 53 1 1
## 6244 4 53 1 1
## 6245 5 53 1 1
## 6246 6 53 1 1
## 6247 7 53 1 1
## 6248 8 53 1 1
## 6249 9 53 1 1
## 6250 10 53 1 1
## 6251 11 53 1 1
## 6252 12 53 1 1
## 6253 13 53 1 1
## 6254 14 53 1 1
## 6255 15 53 1 1
## 6256 16 53 1 1
## 6257 17 53 1 1
## 6258 18 53 1 1
## 6259 19 53 1 1
## 6260 20 53 1 1
## 6261 21 53 1 1
## 6262 22 53 1 1
## 6263 23 53 1 1
## 6264 24 53 1 1
## 6265 25 53 1 1
## 6266 26 53 1 1
## 6267 27 53 1 1
## 6268 28 53 1 1
## 6269 29 53 1 1
## 6270 30 53 1 1
## 6271 31 53 1 1
## 6272 32 53 1 1
## 6273 33 53 1 1
## 6274 34 53 1 1
## 6275 35 53 1 1
## 6276 36 53 1 1
## 6277 37 53 1 1
## 6278 38 53 1 1
## 6279 39 53 1 1
## 6280 40 53 1 1
## 6281 41 53 1 1
## 6282 42 53 1 1
## 6283 43 53 1 1
## 6284 44 53 1 1
## 6285 45 53 1 1
## 6286 46 53 1 1
## 6287 47 53 1 1
## 6288 48 53 1 1
## 6289 49 53 1 1
## 6290 50 53 1 1
## 6291 51 53 1 1
## 6292 52 53 1 1
## 6293 53 53 1 1
## 6294 54 53 1 1
## 6295 55 53 1 1
## 6296 56 53 1 1
## 6297 57 53 1 1
## 6298 58 53 1 1
## 6299 59 53 1 1
## 6300 60 53 1 1
## 6301 61 53 1 1
## 6302 62 53 1 1
## 6303 63 53 1 1
## 6304 64 53 1 1
## 6305 65 53 1 1
## 6306 66 53 1 1
## 6307 67 53 1 1
## 6308 68 53 1 1
## 6309 69 53 1 1
## 6310 70 53 1 1
## 6311 71 53 1 1
## 6312 72 53 1 1
## 6313 73 53 1 1
## 6314 74 53 1 1
## 6315 75 53 1 1
## 6316 76 53 1 1
## 6317 77 53 1 1
## 6318 78 53 1 1
## 6319 79 53 1 1
## 6320 80 53 1 1
## 6321 81 53 1 1
## 6322 82 53 1 1
## 6323 83 53 1 1
## 6324 84 53 1 1
## 6325 85 53 1 1
## 6326 86 53 1 1
## 6327 87 53 1 1
## 6328 88 53 1 1
## 6329 89 53 1 1
## 6330 90 53 1 1
## 6331 91 53 1 1
## 6332 92 53 1 1
## 6333 93 53 1 1
## 6334 94 53 1 1
## 6335 95 53 1 1
## 6336 96 53 1 1
## 6337 97 53 1 1
## 6338 98 53 1 1
## 6339 99 53 1 1
## 6340 100 53 1 1
## 6341 101 53 1 1
## 6342 102 53 1 1
## 6343 103 53 1 1
## 6344 104 53 1 1
## 6345 105 53 1 1
## 6346 106 53 1 1
## 6347 107 53 1 1
## 6348 108 53 1 1
## 6349 109 53 1 1
## 6350 110 53 1 1
## 6351 111 53 1 1
## 6352 112 53 1 1
## 6353 113 53 1 1
## 6354 114 53 1 1
## 6355 115 53 1 1
## 6356 116 53 1 1
## 6357 117 53 1 1
## 6358 118 53 1 1
## 6359 119 53 1 1
## 6360 120 53 1 1
## 6361 1 54 1 1
## 6362 2 54 1 1
## 6363 3 54 1 1
## 6364 4 54 1 1
## 6365 5 54 1 1
## 6366 6 54 1 1
## 6367 7 54 1 1
## 6368 8 54 1 1
## 6369 9 54 1 1
## 6370 10 54 1 1
## 6371 11 54 1 1
## 6372 12 54 1 1
## 6373 13 54 1 1
## 6374 14 54 1 1
## 6375 15 54 1 1
## 6376 16 54 1 1
## 6377 17 54 1 1
## 6378 18 54 1 1
## 6379 19 54 1 1
## 6380 20 54 1 1
## 6381 21 54 1 1
## 6382 22 54 1 1
## 6383 23 54 1 1
## 6384 24 54 1 1
## 6385 25 54 1 1
## 6386 26 54 1 1
## 6387 27 54 1 1
## 6388 28 54 1 1
## 6389 29 54 1 1
## 6390 30 54 1 1
## 6391 31 54 1 1
## 6392 32 54 1 1
## 6393 33 54 1 1
## 6394 34 54 1 1
## 6395 35 54 1 1
## 6396 36 54 1 1
## 6397 37 54 1 1
## 6398 38 54 1 1
## 6399 39 54 1 1
## 6400 40 54 1 1
## 6401 41 54 1 1
## 6402 42 54 1 1
## 6403 43 54 1 1
## 6404 44 54 1 1
## 6405 45 54 1 1
## 6406 46 54 1 1
## 6407 47 54 1 1
## 6408 48 54 1 1
## 6409 49 54 1 1
## 6410 50 54 1 1
## 6411 51 54 1 1
## 6412 52 54 1 1
## 6413 53 54 1 1
## 6414 54 54 1 1
## 6415 55 54 1 1
## 6416 56 54 1 1
## 6417 57 54 1 1
## 6418 58 54 1 1
## 6419 59 54 1 1
## 6420 60 54 1 1
## 6421 61 54 1 1
## 6422 62 54 1 1
## 6423 63 54 1 1
## 6424 64 54 1 1
## 6425 65 54 1 1
## 6426 66 54 1 1
## 6427 67 54 1 1
## 6428 68 54 1 1
## 6429 69 54 1 1
## 6430 70 54 1 1
## 6431 71 54 1 1
## 6432 72 54 1 1
## 6433 73 54 1 1
## 6434 74 54 1 1
## 6435 75 54 1 1
## 6436 76 54 1 1
## 6437 77 54 1 1
## 6438 78 54 1 1
## 6439 79 54 1 1
## 6440 80 54 1 1
## 6441 81 54 1 1
## 6442 82 54 1 1
## 6443 83 54 1 1
## 6444 84 54 1 1
## 6445 85 54 1 1
## 6446 86 54 1 1
## 6447 87 54 1 1
## 6448 88 54 1 1
## 6449 89 54 1 1
## 6450 90 54 1 1
## 6451 91 54 1 1
## 6452 92 54 1 1
## 6453 93 54 1 1
## 6454 94 54 1 1
## 6455 95 54 1 1
## 6456 96 54 1 1
## 6457 97 54 1 1
## 6458 98 54 1 1
## 6459 99 54 1 1
## 6460 100 54 1 1
## 6461 101 54 1 1
## 6462 102 54 1 1
## 6463 103 54 1 1
## 6464 104 54 1 1
## 6465 105 54 1 1
## 6466 106 54 1 1
## 6467 107 54 1 1
## 6468 108 54 1 1
## 6469 109 54 1 1
## 6470 110 54 1 1
## 6471 111 54 1 1
## 6472 112 54 1 1
## 6473 113 54 1 1
## 6474 114 54 1 1
## 6475 115 54 1 1
## 6476 116 54 1 1
## 6477 117 54 1 1
## 6478 118 54 1 1
## 6479 119 54 1 1
## 6480 120 54 1 1
## 6481 1 55 1 1
## 6482 2 55 1 1
## 6483 3 55 1 1
## 6484 4 55 1 1
## 6485 5 55 1 1
## 6486 6 55 1 1
## 6487 7 55 1 1
## 6488 8 55 1 1
## 6489 9 55 1 1
## 6490 10 55 1 1
## 6491 11 55 1 1
## 6492 12 55 1 1
## 6493 13 55 1 1
## 6494 14 55 1 1
## 6495 15 55 1 1
## 6496 16 55 1 1
## 6497 17 55 1 1
## 6498 18 55 1 1
## 6499 19 55 1 1
## 6500 20 55 1 1
## 6501 21 55 1 1
## 6502 22 55 1 1
## 6503 23 55 1 1
## 6504 24 55 1 1
## 6505 25 55 1 1
## 6506 26 55 1 1
## 6507 27 55 1 1
## 6508 28 55 1 1
## 6509 29 55 1 1
## 6510 30 55 1 1
## 6511 31 55 1 1
## 6512 32 55 1 1
## 6513 33 55 1 1
## 6514 34 55 1 1
## 6515 35 55 1 1
## 6516 36 55 1 1
## 6517 37 55 1 1
## 6518 38 55 1 1
## 6519 39 55 1 1
## 6520 40 55 1 1
## 6521 41 55 1 1
## 6522 42 55 1 1
## 6523 43 55 1 1
## 6524 44 55 1 1
## 6525 45 55 1 1
## 6526 46 55 1 1
## 6527 47 55 1 1
## 6528 48 55 1 1
## 6529 49 55 1 1
## 6530 50 55 1 1
## 6531 51 55 1 1
## 6532 52 55 1 1
## 6533 53 55 1 1
## 6534 54 55 1 1
## 6535 55 55 1 1
## 6536 56 55 1 1
## 6537 57 55 1 1
## 6538 58 55 1 1
## 6539 59 55 1 1
## 6540 60 55 1 1
## 6541 61 55 1 1
## 6542 62 55 1 1
## 6543 63 55 1 1
## 6544 64 55 1 1
## 6545 65 55 1 1
## 6546 66 55 1 1
## 6547 67 55 1 1
## 6548 68 55 1 1
## 6549 69 55 1 1
## 6550 70 55 1 1
## 6551 71 55 1 1
## 6552 72 55 1 1
## 6553 73 55 1 1
## 6554 74 55 1 1
## 6555 75 55 1 1
## 6556 76 55 1 1
## 6557 77 55 1 1
## 6558 78 55 1 1
## 6559 79 55 1 1
## 6560 80 55 1 1
## 6561 81 55 1 1
## 6562 82 55 1 1
## 6563 83 55 1 1
## 6564 84 55 1 1
## 6565 85 55 1 1
## 6566 86 55 1 1
## 6567 87 55 1 1
## 6568 88 55 1 1
## 6569 89 55 1 1
## 6570 90 55 1 1
## 6571 91 55 1 1
## 6572 92 55 1 1
## 6573 93 55 1 1
## 6574 94 55 1 1
## 6575 95 55 1 1
## 6576 96 55 1 1
## 6577 97 55 1 1
## 6578 98 55 1 1
## 6579 99 55 1 1
## 6580 100 55 1 1
## 6581 101 55 1 1
## 6582 102 55 1 1
## 6583 103 55 1 1
## 6584 104 55 1 1
## 6585 105 55 1 1
## 6586 106 55 1 1
## 6587 107 55 1 1
## 6588 108 55 1 1
## 6589 109 55 1 1
## 6590 110 55 1 1
## 6591 111 55 1 1
## 6592 112 55 1 1
## 6593 113 55 1 1
## 6594 114 55 1 1
## 6595 115 55 1 1
## 6596 116 55 1 1
## 6597 117 55 1 1
## 6598 118 55 1 1
## 6599 119 55 1 1
## 6600 120 55 1 1
## 6601 1 56 1 1
## 6602 2 56 1 1
## 6603 3 56 1 1
## 6604 4 56 1 1
## 6605 5 56 1 1
## 6606 6 56 1 1
## 6607 7 56 1 1
## 6608 8 56 1 1
## 6609 9 56 1 1
## 6610 10 56 1 1
## 6611 11 56 1 1
## 6612 12 56 1 1
## 6613 13 56 1 1
## 6614 14 56 1 1
## 6615 15 56 1 1
## 6616 16 56 1 1
## 6617 17 56 1 1
## 6618 18 56 1 1
## 6619 19 56 1 1
## 6620 20 56 1 1
## 6621 21 56 1 1
## 6622 22 56 1 1
## 6623 23 56 1 1
## 6624 24 56 1 1
## 6625 25 56 1 1
## 6626 26 56 1 1
## 6627 27 56 1 1
## 6628 28 56 1 1
## 6629 29 56 1 1
## 6630 30 56 1 1
## 6631 31 56 1 1
## 6632 32 56 1 1
## 6633 33 56 1 1
## 6634 34 56 1 1
## 6635 35 56 1 1
## 6636 36 56 1 1
## 6637 37 56 1 1
## 6638 38 56 1 1
## 6639 39 56 1 1
## 6640 40 56 1 1
## 6641 41 56 1 1
## 6642 42 56 1 1
## 6643 43 56 1 1
## 6644 44 56 1 1
## 6645 45 56 1 1
## 6646 46 56 1 1
## 6647 47 56 1 1
## 6648 48 56 1 1
## 6649 49 56 1 1
## 6650 50 56 1 1
## 6651 51 56 1 1
## 6652 52 56 1 1
## 6653 53 56 1 1
## 6654 54 56 1 1
## 6655 55 56 1 1
## 6656 56 56 1 1
## 6657 57 56 1 1
## 6658 58 56 1 1
## 6659 59 56 1 1
## 6660 60 56 1 1
## 6661 61 56 1 1
## 6662 62 56 1 1
## 6663 63 56 1 1
## 6664 64 56 1 1
## 6665 65 56 1 1
## 6666 66 56 1 1
## 6667 67 56 1 1
## 6668 68 56 1 1
## 6669 69 56 1 1
## 6670 70 56 1 1
## 6671 71 56 1 1
## 6672 72 56 1 1
## 6673 73 56 1 1
## 6674 74 56 1 1
## 6675 75 56 1 1
## 6676 76 56 1 1
## 6677 77 56 1 1
## 6678 78 56 1 1
## 6679 79 56 1 1
## 6680 80 56 1 1
## 6681 81 56 1 1
## 6682 82 56 1 1
## 6683 83 56 1 1
## 6684 84 56 1 1
## 6685 85 56 1 1
## 6686 86 56 1 1
## 6687 87 56 1 1
## 6688 88 56 1 1
## 6689 89 56 1 1
## 6690 90 56 1 1
## 6691 91 56 1 1
## 6692 92 56 1 1
## 6693 93 56 1 1
## 6694 94 56 1 1
## 6695 95 56 1 1
## 6696 96 56 1 1
## 6697 97 56 1 1
## 6698 98 56 1 1
## 6699 99 56 1 1
## 6700 100 56 1 1
## 6701 101 56 1 1
## 6702 102 56 1 1
## 6703 103 56 1 1
## 6704 104 56 1 1
## 6705 105 56 1 1
## 6706 106 56 1 1
## 6707 107 56 1 1
## 6708 108 56 1 1
## 6709 109 56 1 1
## 6710 110 56 1 1
## 6711 111 56 1 1
## 6712 112 56 1 1
## 6713 113 56 1 1
## 6714 114 56 1 1
## 6715 115 56 1 1
## 6716 116 56 1 1
## 6717 117 56 1 1
## 6718 118 56 1 1
## 6719 119 56 1 1
## 6720 120 56 1 1
## 6721 1 57 1 1
## 6722 2 57 1 1
## 6723 3 57 1 1
## 6724 4 57 1 1
## 6725 5 57 1 1
## 6726 6 57 1 1
## 6727 7 57 1 1
## 6728 8 57 1 1
## 6729 9 57 1 1
## 6730 10 57 1 1
## 6731 11 57 1 1
## 6732 12 57 1 1
## 6733 13 57 1 1
## 6734 14 57 1 1
## 6735 15 57 1 1
## 6736 16 57 1 1
## 6737 17 57 1 1
## 6738 18 57 1 1
## 6739 19 57 1 1
## 6740 20 57 1 1
## 6741 21 57 1 1
## 6742 22 57 1 1
## 6743 23 57 1 1
## 6744 24 57 1 1
## 6745 25 57 1 1
## 6746 26 57 1 1
## 6747 27 57 1 1
## 6748 28 57 1 1
## 6749 29 57 1 1
## 6750 30 57 1 1
## 6751 31 57 1 1
## 6752 32 57 1 1
## 6753 33 57 1 1
## 6754 34 57 1 1
## 6755 35 57 1 1
## 6756 36 57 1 1
## 6757 37 57 1 1
## 6758 38 57 1 1
## 6759 39 57 1 1
## 6760 40 57 1 1
## 6761 41 57 1 1
## 6762 42 57 1 1
## 6763 43 57 1 1
## 6764 44 57 1 1
## 6765 45 57 1 1
## 6766 46 57 1 1
## 6767 47 57 1 1
## 6768 48 57 1 1
## 6769 49 57 1 1
## 6770 50 57 1 1
## 6771 51 57 1 1
## 6772 52 57 1 1
## 6773 53 57 1 1
## 6774 54 57 1 1
## 6775 55 57 1 1
## 6776 56 57 1 1
## 6777 57 57 1 1
## 6778 58 57 1 1
## 6779 59 57 1 1
## 6780 60 57 1 1
## 6781 61 57 1 1
## 6782 62 57 1 1
## 6783 63 57 1 1
## 6784 64 57 1 1
## 6785 65 57 1 1
## 6786 66 57 1 1
## 6787 67 57 1 1
## 6788 68 57 1 1
## 6789 69 57 1 1
## 6790 70 57 1 1
## 6791 71 57 1 1
## 6792 72 57 1 1
## 6793 73 57 1 1
## 6794 74 57 1 1
## 6795 75 57 1 1
## 6796 76 57 1 1
## 6797 77 57 1 1
## 6798 78 57 1 1
## 6799 79 57 1 1
## 6800 80 57 1 1
## 6801 81 57 1 1
## 6802 82 57 1 1
## 6803 83 57 1 1
## 6804 84 57 1 1
## 6805 85 57 1 1
## 6806 86 57 1 1
## 6807 87 57 1 1
## 6808 88 57 1 1
## 6809 89 57 1 1
## 6810 90 57 1 1
## 6811 91 57 1 1
## 6812 92 57 1 1
## 6813 93 57 1 1
## 6814 94 57 1 1
## 6815 95 57 1 1
## 6816 96 57 1 1
## 6817 97 57 1 1
## 6818 98 57 1 1
## 6819 99 57 1 1
## 6820 100 57 1 1
## 6821 101 57 1 1
## 6822 102 57 1 1
## 6823 103 57 1 1
## 6824 104 57 1 1
## 6825 105 57 1 1
## 6826 106 57 1 1
## 6827 107 57 1 1
## 6828 108 57 1 1
## 6829 109 57 1 1
## 6830 110 57 1 1
## 6831 111 57 1 1
## 6832 112 57 1 1
## 6833 113 57 1 1
## 6834 114 57 1 1
## 6835 115 57 1 1
## 6836 116 57 1 1
## 6837 117 57 1 1
## 6838 118 57 1 1
## 6839 119 57 1 1
## 6840 120 57 1 1
## 6841 1 58 1 1
## 6842 2 58 1 1
## 6843 3 58 1 1
## 6844 4 58 1 1
## 6845 5 58 1 1
## 6846 6 58 1 1
## 6847 7 58 1 1
## 6848 8 58 1 1
## 6849 9 58 1 1
## 6850 10 58 1 1
## 6851 11 58 1 1
## 6852 12 58 1 1
## 6853 13 58 1 1
## 6854 14 58 1 1
## 6855 15 58 1 1
## 6856 16 58 1 1
## 6857 17 58 1 1
## 6858 18 58 1 1
## 6859 19 58 1 1
## 6860 20 58 1 1
## 6861 21 58 1 1
## 6862 22 58 1 1
## 6863 23 58 1 1
## 6864 24 58 1 1
## 6865 25 58 1 1
## 6866 26 58 1 1
## 6867 27 58 1 1
## 6868 28 58 1 1
## 6869 29 58 1 1
## 6870 30 58 1 1
## 6871 31 58 1 1
## 6872 32 58 1 1
## 6873 33 58 1 1
## 6874 34 58 1 1
## 6875 35 58 1 1
## 6876 36 58 1 1
## 6877 37 58 1 1
## 6878 38 58 1 1
## 6879 39 58 1 1
## 6880 40 58 1 1
## 6881 41 58 1 1
## 6882 42 58 1 1
## 6883 43 58 1 1
## 6884 44 58 1 1
## 6885 45 58 1 1
## 6886 46 58 1 1
## 6887 47 58 1 1
## 6888 48 58 1 1
## 6889 49 58 1 1
## 6890 50 58 1 1
## 6891 51 58 1 1
## 6892 52 58 1 1
## 6893 53 58 1 1
## 6894 54 58 1 1
## 6895 55 58 1 1
## 6896 56 58 1 1
## 6897 57 58 1 1
## 6898 58 58 1 1
## 6899 59 58 1 1
## 6900 60 58 1 1
## 6901 61 58 1 1
## 6902 62 58 1 1
## 6903 63 58 1 1
## 6904 64 58 1 1
## 6905 65 58 1 1
## 6906 66 58 1 1
## 6907 67 58 1 1
## 6908 68 58 1 1
## 6909 69 58 1 1
## 6910 70 58 1 1
## 6911 71 58 1 1
## 6912 72 58 1 1
## 6913 73 58 1 1
## 6914 74 58 1 1
## 6915 75 58 1 1
## 6916 76 58 1 1
## 6917 77 58 1 1
## 6918 78 58 1 1
## 6919 79 58 1 1
## 6920 80 58 1 1
## 6921 81 58 1 1
## 6922 82 58 1 1
## 6923 83 58 1 1
## 6924 84 58 1 1
## 6925 85 58 1 1
## 6926 86 58 1 1
## 6927 87 58 1 1
## 6928 88 58 1 1
## 6929 89 58 1 1
## 6930 90 58 1 1
## 6931 91 58 1 1
## 6932 92 58 1 1
## 6933 93 58 1 1
## 6934 94 58 1 1
## 6935 95 58 1 1
## 6936 96 58 1 1
## 6937 97 58 1 1
## 6938 98 58 1 1
## 6939 99 58 1 1
## 6940 100 58 1 1
## 6941 101 58 1 1
## 6942 102 58 1 1
## 6943 103 58 1 1
## 6944 104 58 1 1
## 6945 105 58 1 1
## 6946 106 58 1 1
## 6947 107 58 1 1
## 6948 108 58 1 1
## 6949 109 58 1 1
## 6950 110 58 1 1
## 6951 111 58 1 1
## 6952 112 58 1 1
## 6953 113 58 1 1
## 6954 114 58 1 1
## 6955 115 58 1 1
## 6956 116 58 1 1
## 6957 117 58 1 1
## 6958 118 58 1 1
## 6959 119 58 1 1
## 6960 120 58 1 1
## 6961 1 59 1 1
## 6962 2 59 1 1
## 6963 3 59 1 1
## 6964 4 59 1 1
## 6965 5 59 1 1
## 6966 6 59 1 1
## 6967 7 59 1 1
## 6968 8 59 1 1
## 6969 9 59 1 1
## 6970 10 59 1 1
## 6971 11 59 1 1
## 6972 12 59 1 1
## 6973 13 59 1 1
## 6974 14 59 1 1
## 6975 15 59 1 1
## 6976 16 59 1 1
## 6977 17 59 1 1
## 6978 18 59 1 1
## 6979 19 59 1 1
## 6980 20 59 1 1
## 6981 21 59 1 1
## 6982 22 59 1 1
## 6983 23 59 1 1
## 6984 24 59 1 1
## 6985 25 59 1 1
## 6986 26 59 1 1
## 6987 27 59 1 1
## 6988 28 59 1 1
## 6989 29 59 1 1
## 6990 30 59 1 1
## 6991 31 59 1 1
## 6992 32 59 1 1
## 6993 33 59 1 1
## 6994 34 59 1 1
## 6995 35 59 1 1
## 6996 36 59 1 1
## 6997 37 59 1 1
## 6998 38 59 1 1
## 6999 39 59 1 1
## 7000 40 59 1 1
## 7001 41 59 1 1
## 7002 42 59 1 1
## 7003 43 59 1 1
## 7004 44 59 1 1
## 7005 45 59 1 1
## 7006 46 59 1 1
## 7007 47 59 1 1
## 7008 48 59 1 1
## 7009 49 59 1 1
## 7010 50 59 1 1
## 7011 51 59 1 1
## 7012 52 59 1 1
## 7013 53 59 1 1
## 7014 54 59 1 1
## 7015 55 59 1 1
## 7016 56 59 1 1
## 7017 57 59 1 1
## 7018 58 59 1 1
## 7019 59 59 1 1
## 7020 60 59 1 1
## 7021 61 59 1 1
## 7022 62 59 1 1
## 7023 63 59 1 1
## 7024 64 59 1 1
## 7025 65 59 1 1
## 7026 66 59 1 1
## 7027 67 59 1 1
## 7028 68 59 1 1
## 7029 69 59 1 1
## 7030 70 59 1 1
## 7031 71 59 1 1
## 7032 72 59 1 1
## 7033 73 59 1 1
## 7034 74 59 1 1
## 7035 75 59 1 1
## 7036 76 59 1 1
## 7037 77 59 1 1
## 7038 78 59 1 1
## 7039 79 59 1 1
## 7040 80 59 1 1
## 7041 81 59 1 1
## 7042 82 59 1 1
## 7043 83 59 1 1
## 7044 84 59 1 1
## 7045 85 59 1 1
## 7046 86 59 1 1
## 7047 87 59 1 1
## 7048 88 59 1 1
## 7049 89 59 1 1
## 7050 90 59 1 1
## 7051 91 59 1 1
## 7052 92 59 1 1
## 7053 93 59 1 1
## 7054 94 59 1 1
## 7055 95 59 1 1
## 7056 96 59 1 1
## 7057 97 59 1 1
## 7058 98 59 1 1
## 7059 99 59 1 1
## 7060 100 59 1 1
## 7061 101 59 1 1
## 7062 102 59 1 1
## 7063 103 59 1 1
## 7064 104 59 1 1
## 7065 105 59 1 1
## 7066 106 59 1 1
## 7067 107 59 1 1
## 7068 108 59 1 1
## 7069 109 59 1 1
## 7070 110 59 1 1
## 7071 111 59 1 1
## 7072 112 59 1 1
## 7073 113 59 1 1
## 7074 114 59 1 1
## 7075 115 59 1 1
## 7076 116 59 1 1
## 7077 117 59 1 1
## 7078 118 59 1 1
## 7079 119 59 1 1
## 7080 120 59 1 1
## 7081 1 60 1 1
## 7082 2 60 1 1
## 7083 3 60 1 1
## 7084 4 60 1 1
## 7085 5 60 1 1
## 7086 6 60 1 1
## 7087 7 60 1 1
## 7088 8 60 1 1
## 7089 9 60 1 1
## 7090 10 60 1 1
## 7091 11 60 1 1
## 7092 12 60 1 1
## 7093 13 60 1 1
## 7094 14 60 1 1
## 7095 15 60 1 1
## 7096 16 60 1 1
## 7097 17 60 1 1
## 7098 18 60 1 1
## 7099 19 60 1 1
## 7100 20 60 1 1
## 7101 21 60 1 1
## 7102 22 60 1 1
## 7103 23 60 1 1
## 7104 24 60 1 1
## 7105 25 60 1 1
## 7106 26 60 1 1
## 7107 27 60 1 1
## 7108 28 60 1 1
## 7109 29 60 1 1
## 7110 30 60 1 1
## 7111 31 60 1 1
## 7112 32 60 1 1
## 7113 33 60 1 1
## 7114 34 60 1 1
## 7115 35 60 1 1
## 7116 36 60 1 1
## 7117 37 60 1 1
## 7118 38 60 1 1
## 7119 39 60 1 1
## 7120 40 60 1 1
## 7121 41 60 1 1
## 7122 42 60 1 1
## 7123 43 60 1 1
## 7124 44 60 1 1
## 7125 45 60 1 1
## 7126 46 60 1 1
## 7127 47 60 1 1
## 7128 48 60 1 1
## 7129 49 60 1 1
## 7130 50 60 1 1
## 7131 51 60 1 1
## 7132 52 60 1 1
## 7133 53 60 1 1
## 7134 54 60 1 1
## 7135 55 60 1 1
## 7136 56 60 1 1
## 7137 57 60 1 1
## 7138 58 60 1 1
## 7139 59 60 1 1
## 7140 60 60 1 1
## 7141 61 60 1 1
## 7142 62 60 1 1
## 7143 63 60 1 1
## 7144 64 60 1 1
## 7145 65 60 1 1
## 7146 66 60 1 1
## 7147 67 60 1 1
## 7148 68 60 1 1
## 7149 69 60 1 1
## 7150 70 60 1 1
## 7151 71 60 1 1
## 7152 72 60 1 1
## 7153 73 60 1 1
## 7154 74 60 1 1
## 7155 75 60 1 1
## 7156 76 60 1 1
## 7157 77 60 1 1
## 7158 78 60 1 1
## 7159 79 60 1 1
## 7160 80 60 1 1
## 7161 81 60 1 1
## 7162 82 60 1 1
## 7163 83 60 1 1
## 7164 84 60 1 1
## 7165 85 60 1 1
## 7166 86 60 1 1
## 7167 87 60 1 1
## 7168 88 60 1 1
## 7169 89 60 1 1
## 7170 90 60 1 1
## 7171 91 60 1 1
## 7172 92 60 1 1
## 7173 93 60 1 1
## 7174 94 60 1 1
## 7175 95 60 1 1
## 7176 96 60 1 1
## 7177 97 60 1 1
## 7178 98 60 1 1
## 7179 99 60 1 1
## 7180 100 60 1 1
## 7181 101 60 1 1
## 7182 102 60 1 1
## 7183 103 60 1 1
## 7184 104 60 1 1
## 7185 105 60 1 1
## 7186 106 60 1 1
## 7187 107 60 1 1
## 7188 108 60 1 1
## 7189 109 60 1 1
## 7190 110 60 1 1
## 7191 111 60 1 1
## 7192 112 60 1 1
## 7193 113 60 1 1
## 7194 114 60 1 1
## 7195 115 60 1 1
## 7196 116 60 1 1
## 7197 117 60 1 1
## 7198 118 60 1 1
## 7199 119 60 1 1
## 7200 120 60 1 1
## 7201 1 61 1 1
## 7202 2 61 1 1
## 7203 3 61 1 1
## 7204 4 61 1 1
## 7205 5 61 1 1
## 7206 6 61 1 1
## 7207 7 61 1 1
## 7208 8 61 1 1
## 7209 9 61 1 1
## 7210 10 61 1 1
## 7211 11 61 1 1
## 7212 12 61 1 1
## 7213 13 61 1 1
## 7214 14 61 1 1
## 7215 15 61 1 1
## 7216 16 61 1 1
## 7217 17 61 1 1
## 7218 18 61 1 1
## 7219 19 61 1 1
## 7220 20 61 1 1
## 7221 21 61 1 1
## 7222 22 61 1 1
## 7223 23 61 1 1
## 7224 24 61 1 1
## 7225 25 61 1 1
## 7226 26 61 1 1
## 7227 27 61 1 1
## 7228 28 61 1 1
## 7229 29 61 1 1
## 7230 30 61 1 1
## 7231 31 61 1 1
## 7232 32 61 1 1
## 7233 33 61 1 1
## 7234 34 61 1 1
## 7235 35 61 1 1
## 7236 36 61 1 1
## 7237 37 61 1 1
## 7238 38 61 1 1
## 7239 39 61 1 1
## 7240 40 61 1 1
## 7241 41 61 1 1
## 7242 42 61 1 1
## 7243 43 61 1 1
## 7244 44 61 1 1
## 7245 45 61 1 1
## 7246 46 61 1 1
## 7247 47 61 1 1
## 7248 48 61 1 1
## 7249 49 61 1 1
## 7250 50 61 1 1
## 7251 51 61 1 1
## 7252 52 61 1 1
## 7253 53 61 1 1
## 7254 54 61 1 1
## 7255 55 61 1 1
## 7256 56 61 1 1
## 7257 57 61 1 1
## 7258 58 61 1 1
## 7259 59 61 1 1
## 7260 60 61 1 1
## 7261 61 61 1 1
## 7262 62 61 1 1
## 7263 63 61 1 1
## 7264 64 61 1 1
## 7265 65 61 1 1
## 7266 66 61 1 1
## 7267 67 61 1 1
## 7268 68 61 1 1
## 7269 69 61 1 1
## 7270 70 61 1 1
## 7271 71 61 1 1
## 7272 72 61 1 1
## 7273 73 61 1 1
## 7274 74 61 1 1
## 7275 75 61 1 1
## 7276 76 61 1 1
## 7277 77 61 1 1
## 7278 78 61 1 1
## 7279 79 61 1 1
## 7280 80 61 1 1
## 7281 81 61 1 1
## 7282 82 61 1 1
## 7283 83 61 1 1
## 7284 84 61 1 1
## 7285 85 61 1 1
## 7286 86 61 1 1
## 7287 87 61 1 1
## 7288 88 61 1 1
## 7289 89 61 1 1
## 7290 90 61 1 1
## 7291 91 61 1 1
## 7292 92 61 1 1
## 7293 93 61 1 1
## 7294 94 61 1 1
## 7295 95 61 1 1
## 7296 96 61 1 1
## 7297 97 61 1 1
## 7298 98 61 1 1
## 7299 99 61 1 1
## 7300 100 61 1 1
## 7301 101 61 1 1
## 7302 102 61 1 1
## 7303 103 61 1 1
## 7304 104 61 1 1
## 7305 105 61 1 1
## 7306 106 61 1 1
## 7307 107 61 1 1
## 7308 108 61 1 1
## 7309 109 61 1 1
## 7310 110 61 1 1
## 7311 111 61 1 1
## 7312 112 61 1 1
## 7313 113 61 1 1
## 7314 114 61 1 1
## 7315 115 61 1 1
## 7316 116 61 1 1
## 7317 117 61 1 1
## 7318 118 61 1 1
## 7319 119 61 1 1
## 7320 120 61 1 1
## 7321 1 62 1 1
## 7322 2 62 1 1
## 7323 3 62 1 1
## 7324 4 62 1 1
## 7325 5 62 1 1
## 7326 6 62 1 1
## 7327 7 62 1 1
## 7328 8 62 1 1
## 7329 9 62 1 1
## 7330 10 62 1 1
## 7331 11 62 1 1
## 7332 12 62 1 1
## 7333 13 62 1 1
## 7334 14 62 1 1
## 7335 15 62 1 1
## 7336 16 62 1 1
## 7337 17 62 1 1
## 7338 18 62 1 1
## 7339 19 62 1 1
## 7340 20 62 1 1
## 7341 21 62 1 1
## 7342 22 62 1 1
## 7343 23 62 1 1
## 7344 24 62 1 1
## 7345 25 62 1 1
## 7346 26 62 1 1
## 7347 27 62 1 1
## 7348 28 62 1 1
## 7349 29 62 1 1
## 7350 30 62 1 1
## 7351 31 62 1 1
## 7352 32 62 1 1
## 7353 33 62 1 1
## 7354 34 62 1 1
## 7355 35 62 1 1
## 7356 36 62 1 1
## 7357 37 62 1 1
## 7358 38 62 1 1
## 7359 39 62 1 1
## 7360 40 62 1 1
## 7361 41 62 1 1
## 7362 42 62 1 1
## 7363 43 62 1 1
## 7364 44 62 1 1
## 7365 45 62 1 1
## 7366 46 62 1 1
## 7367 47 62 1 1
## 7368 48 62 1 1
## 7369 49 62 1 1
## 7370 50 62 1 1
## 7371 51 62 1 1
## 7372 52 62 1 1
## 7373 53 62 1 1
## 7374 54 62 1 1
## 7375 55 62 1 1
## 7376 56 62 1 1
## 7377 57 62 1 1
## 7378 58 62 1 1
## 7379 59 62 1 1
## 7380 60 62 1 1
## 7381 61 62 1 1
## 7382 62 62 1 1
## 7383 63 62 1 1
## 7384 64 62 1 1
## 7385 65 62 1 1
## 7386 66 62 1 1
## 7387 67 62 1 1
## 7388 68 62 1 1
## 7389 69 62 1 1
## 7390 70 62 1 1
## 7391 71 62 1 1
## 7392 72 62 1 1
## 7393 73 62 1 1
## 7394 74 62 1 1
## 7395 75 62 1 1
## 7396 76 62 1 1
## 7397 77 62 1 1
## 7398 78 62 1 1
## 7399 79 62 1 1
## 7400 80 62 1 1
## 7401 81 62 1 1
## 7402 82 62 1 1
## 7403 83 62 1 1
## 7404 84 62 1 1
## 7405 85 62 1 1
## 7406 86 62 1 1
## 7407 87 62 1 1
## 7408 88 62 1 1
## 7409 89 62 1 1
## 7410 90 62 1 1
## 7411 91 62 1 1
## 7412 92 62 1 1
## 7413 93 62 1 1
## 7414 94 62 1 1
## 7415 95 62 1 1
## 7416 96 62 1 1
## 7417 97 62 1 1
## 7418 98 62 1 1
## 7419 99 62 1 1
## 7420 100 62 1 1
## 7421 101 62 1 1
## 7422 102 62 1 1
## 7423 103 62 1 1
## 7424 104 62 1 1
## 7425 105 62 1 1
## 7426 106 62 1 1
## 7427 107 62 1 1
## 7428 108 62 1 1
## 7429 109 62 1 1
## 7430 110 62 1 1
## 7431 111 62 1 1
## 7432 112 62 1 1
## 7433 113 62 1 1
## 7434 114 62 1 1
## 7435 115 62 1 1
## 7436 116 62 1 1
## 7437 117 62 1 1
## 7438 118 62 1 1
## 7439 119 62 1 1
## 7440 120 62 1 1
## 7441 1 63 1 1
## 7442 2 63 1 1
## 7443 3 63 1 1
## 7444 4 63 1 1
## 7445 5 63 1 1
## 7446 6 63 1 1
## 7447 7 63 1 1
## 7448 8 63 1 1
## 7449 9 63 1 1
## 7450 10 63 1 1
## 7451 11 63 1 1
## 7452 12 63 1 1
## 7453 13 63 1 1
## 7454 14 63 1 1
## 7455 15 63 1 1
## 7456 16 63 1 1
## 7457 17 63 1 1
## 7458 18 63 1 1
## 7459 19 63 1 1
## 7460 20 63 1 1
## 7461 21 63 1 1
## 7462 22 63 1 1
## 7463 23 63 1 1
## 7464 24 63 1 1
## 7465 25 63 1 1
## 7466 26 63 1 1
## 7467 27 63 1 1
## 7468 28 63 1 1
## 7469 29 63 1 1
## 7470 30 63 1 1
## 7471 31 63 1 1
## 7472 32 63 1 1
## 7473 33 63 1 1
## 7474 34 63 1 1
## 7475 35 63 1 1
## 7476 36 63 1 1
## 7477 37 63 1 1
## 7478 38 63 1 1
## 7479 39 63 1 1
## 7480 40 63 1 1
## 7481 41 63 1 1
## 7482 42 63 1 1
## 7483 43 63 1 1
## 7484 44 63 1 1
## 7485 45 63 1 1
## 7486 46 63 1 1
## 7487 47 63 1 1
## 7488 48 63 1 1
## 7489 49 63 1 1
## 7490 50 63 1 1
## 7491 51 63 1 1
## 7492 52 63 1 1
## 7493 53 63 1 1
## 7494 54 63 1 1
## 7495 55 63 1 1
## 7496 56 63 1 1
## 7497 57 63 1 1
## 7498 58 63 1 1
## 7499 59 63 1 1
## 7500 60 63 1 1
## 7501 61 63 1 1
## 7502 62 63 1 1
## 7503 63 63 1 1
## 7504 64 63 1 1
## 7505 65 63 1 1
## 7506 66 63 1 1
## 7507 67 63 1 1
## 7508 68 63 1 1
## 7509 69 63 1 1
## 7510 70 63 1 1
## 7511 71 63 1 1
## 7512 72 63 1 1
## 7513 73 63 1 1
## 7514 74 63 1 1
## 7515 75 63 1 1
## 7516 76 63 1 1
## 7517 77 63 1 1
## 7518 78 63 1 1
## 7519 79 63 1 1
## 7520 80 63 1 1
## 7521 81 63 1 1
## 7522 82 63 1 1
## 7523 83 63 1 1
## 7524 84 63 1 1
## 7525 85 63 1 1
## 7526 86 63 1 1
## 7527 87 63 1 1
## 7528 88 63 1 1
## 7529 89 63 1 1
## 7530 90 63 1 1
## 7531 91 63 1 1
## 7532 92 63 1 1
## 7533 93 63 1 1
## 7534 94 63 1 1
## 7535 95 63 1 1
## 7536 96 63 1 1
## 7537 97 63 1 1
## 7538 98 63 1 1
## 7539 99 63 1 1
## 7540 100 63 1 1
## 7541 101 63 1 1
## 7542 102 63 1 1
## 7543 103 63 1 1
## 7544 104 63 1 1
## 7545 105 63 1 1
## 7546 106 63 1 1
## 7547 107 63 1 1
## 7548 108 63 1 1
## 7549 109 63 1 1
## 7550 110 63 1 1
## 7551 111 63 1 1
## 7552 112 63 1 1
## 7553 113 63 1 1
## 7554 114 63 1 1
## 7555 115 63 1 1
## 7556 116 63 1 1
## 7557 117 63 1 1
## 7558 118 63 1 1
## 7559 119 63 1 1
## 7560 120 63 1 1
## 7561 1 64 1 1
## 7562 2 64 1 1
## 7563 3 64 1 1
## 7564 4 64 1 1
## 7565 5 64 1 1
## 7566 6 64 1 1
## 7567 7 64 1 1
## 7568 8 64 1 1
## 7569 9 64 1 1
## 7570 10 64 1 1
## 7571 11 64 1 1
## 7572 12 64 1 1
## 7573 13 64 1 1
## 7574 14 64 1 1
## 7575 15 64 1 1
## 7576 16 64 1 1
## 7577 17 64 1 1
## 7578 18 64 1 1
## 7579 19 64 1 1
## 7580 20 64 1 1
## 7581 21 64 1 1
## 7582 22 64 1 1
## 7583 23 64 1 1
## 7584 24 64 1 1
## 7585 25 64 1 1
## 7586 26 64 1 1
## 7587 27 64 1 1
## 7588 28 64 1 1
## 7589 29 64 1 1
## 7590 30 64 1 1
## 7591 31 64 1 1
## 7592 32 64 1 1
## 7593 33 64 1 1
## 7594 34 64 1 1
## 7595 35 64 1 1
## 7596 36 64 1 1
## 7597 37 64 1 1
## 7598 38 64 1 1
## 7599 39 64 1 1
## 7600 40 64 1 1
## 7601 41 64 1 1
## 7602 42 64 1 1
## 7603 43 64 1 1
## 7604 44 64 1 1
## 7605 45 64 1 1
## 7606 46 64 1 1
## 7607 47 64 1 1
## 7608 48 64 1 1
## 7609 49 64 1 1
## 7610 50 64 1 1
## 7611 51 64 1 1
## 7612 52 64 1 1
## 7613 53 64 1 1
## 7614 54 64 1 1
## 7615 55 64 1 1
## 7616 56 64 1 1
## 7617 57 64 1 1
## 7618 58 64 1 1
## 7619 59 64 1 1
## 7620 60 64 1 1
## 7621 61 64 1 1
## 7622 62 64 1 1
## 7623 63 64 1 1
## 7624 64 64 1 1
## 7625 65 64 1 1
## 7626 66 64 1 1
## 7627 67 64 1 1
## 7628 68 64 1 1
## 7629 69 64 1 1
## 7630 70 64 1 1
## 7631 71 64 1 1
## 7632 72 64 1 1
## 7633 73 64 1 1
## 7634 74 64 1 1
## 7635 75 64 1 1
## 7636 76 64 1 1
## 7637 77 64 1 1
## 7638 78 64 1 1
## 7639 79 64 1 1
## 7640 80 64 1 1
## 7641 81 64 1 1
## 7642 82 64 1 1
## 7643 83 64 1 1
## 7644 84 64 1 1
## 7645 85 64 1 1
## 7646 86 64 1 1
## 7647 87 64 1 1
## 7648 88 64 1 1
## 7649 89 64 1 1
## 7650 90 64 1 1
## 7651 91 64 1 1
## 7652 92 64 1 1
## 7653 93 64 1 1
## 7654 94 64 1 1
## 7655 95 64 1 1
## 7656 96 64 1 1
## 7657 97 64 1 1
## 7658 98 64 1 1
## 7659 99 64 1 1
## 7660 100 64 1 1
## 7661 101 64 1 1
## 7662 102 64 1 1
## 7663 103 64 1 1
## 7664 104 64 1 1
## 7665 105 64 1 1
## 7666 106 64 1 1
## 7667 107 64 1 1
## 7668 108 64 1 1
## 7669 109 64 1 1
## 7670 110 64 1 1
## 7671 111 64 1 1
## 7672 112 64 1 1
## 7673 113 64 1 1
## 7674 114 64 1 1
## 7675 115 64 1 1
## 7676 116 64 1 1
## 7677 117 64 1 1
## 7678 118 64 1 1
## 7679 119 64 1 1
## 7680 120 64 1 1
## 7681 1 65 1 1
## 7682 2 65 1 1
## 7683 3 65 1 1
## 7684 4 65 1 1
## 7685 5 65 1 1
## 7686 6 65 1 1
## 7687 7 65 1 1
## 7688 8 65 1 1
## 7689 9 65 1 1
## 7690 10 65 1 1
## 7691 11 65 1 1
## 7692 12 65 1 1
## 7693 13 65 1 1
## 7694 14 65 1 1
## 7695 15 65 1 1
## 7696 16 65 1 1
## 7697 17 65 1 1
## 7698 18 65 1 1
## 7699 19 65 1 1
## 7700 20 65 1 1
## 7701 21 65 1 1
## 7702 22 65 1 1
## 7703 23 65 1 1
## 7704 24 65 1 1
## 7705 25 65 1 1
## 7706 26 65 1 1
## 7707 27 65 1 1
## 7708 28 65 1 1
## 7709 29 65 1 1
## 7710 30 65 1 1
## 7711 31 65 1 1
## 7712 32 65 1 1
## 7713 33 65 1 1
## 7714 34 65 1 1
## 7715 35 65 1 1
## 7716 36 65 1 1
## 7717 37 65 1 1
## 7718 38 65 1 1
## 7719 39 65 1 1
## 7720 40 65 1 1
## 7721 41 65 1 1
## 7722 42 65 1 1
## 7723 43 65 1 1
## 7724 44 65 1 1
## 7725 45 65 1 1
## 7726 46 65 1 1
## 7727 47 65 1 1
## 7728 48 65 1 1
## 7729 49 65 1 1
## 7730 50 65 1 1
## 7731 51 65 1 1
## 7732 52 65 1 1
## 7733 53 65 1 1
## 7734 54 65 1 1
## 7735 55 65 1 1
## 7736 56 65 1 1
## 7737 57 65 1 1
## 7738 58 65 1 1
## 7739 59 65 1 1
## 7740 60 65 1 1
## 7741 61 65 1 1
## 7742 62 65 1 1
## 7743 63 65 1 1
## 7744 64 65 1 1
## 7745 65 65 1 1
## 7746 66 65 1 1
## 7747 67 65 1 1
## 7748 68 65 1 1
## 7749 69 65 1 1
## 7750 70 65 1 1
## 7751 71 65 1 1
## 7752 72 65 1 1
## 7753 73 65 1 1
## 7754 74 65 1 1
## 7755 75 65 1 1
## 7756 76 65 1 1
## 7757 77 65 1 1
## 7758 78 65 1 1
## 7759 79 65 1 1
## 7760 80 65 1 1
## 7761 81 65 1 1
## 7762 82 65 1 1
## 7763 83 65 1 1
## 7764 84 65 1 1
## 7765 85 65 1 1
## 7766 86 65 1 1
## 7767 87 65 1 1
## 7768 88 65 1 1
## 7769 89 65 1 1
## 7770 90 65 1 1
## 7771 91 65 1 1
## 7772 92 65 1 1
## 7773 93 65 1 1
## 7774 94 65 1 1
## 7775 95 65 1 1
## 7776 96 65 1 1
## 7777 97 65 1 1
## 7778 98 65 1 1
## 7779 99 65 1 1
## 7780 100 65 1 1
## 7781 101 65 1 1
## 7782 102 65 1 1
## 7783 103 65 1 1
## 7784 104 65 1 1
## 7785 105 65 1 1
## 7786 106 65 1 1
## 7787 107 65 1 1
## 7788 108 65 1 1
## 7789 109 65 1 1
## 7790 110 65 1 1
## 7791 111 65 1 1
## 7792 112 65 1 1
## 7793 113 65 1 1
## 7794 114 65 1 1
## 7795 115 65 1 1
## 7796 116 65 1 1
## 7797 117 65 1 1
## 7798 118 65 1 1
## 7799 119 65 1 1
## 7800 120 65 1 1
## 7801 1 66 1 1
## 7802 2 66 1 1
## 7803 3 66 1 1
## 7804 4 66 1 1
## 7805 5 66 1 1
## 7806 6 66 1 1
## 7807 7 66 1 1
## 7808 8 66 1 1
## 7809 9 66 1 1
## 7810 10 66 1 1
## 7811 11 66 1 1
## 7812 12 66 1 1
## 7813 13 66 1 1
## 7814 14 66 1 1
## 7815 15 66 1 1
## 7816 16 66 1 1
## 7817 17 66 1 1
## 7818 18 66 1 1
## 7819 19 66 1 1
## 7820 20 66 1 1
## 7821 21 66 1 1
## 7822 22 66 1 1
## 7823 23 66 1 1
## 7824 24 66 1 1
## 7825 25 66 1 1
## 7826 26 66 1 1
## 7827 27 66 1 1
## 7828 28 66 1 1
## 7829 29 66 1 1
## 7830 30 66 1 1
## 7831 31 66 1 1
## 7832 32 66 1 1
## 7833 33 66 1 1
## 7834 34 66 1 1
## 7835 35 66 1 1
## 7836 36 66 1 1
## 7837 37 66 1 1
## 7838 38 66 1 1
## 7839 39 66 1 1
## 7840 40 66 1 1
## 7841 41 66 1 1
## 7842 42 66 1 1
## 7843 43 66 1 1
## 7844 44 66 1 1
## 7845 45 66 1 1
## 7846 46 66 1 1
## 7847 47 66 1 1
## 7848 48 66 1 1
## 7849 49 66 1 1
## 7850 50 66 1 1
## 7851 51 66 1 1
## 7852 52 66 1 1
## 7853 53 66 1 1
## 7854 54 66 1 1
## 7855 55 66 1 1
## 7856 56 66 1 1
## 7857 57 66 1 1
## 7858 58 66 1 1
## 7859 59 66 1 1
## 7860 60 66 1 1
## 7861 61 66 1 1
## 7862 62 66 1 1
## 7863 63 66 1 1
## 7864 64 66 1 1
## 7865 65 66 1 1
## 7866 66 66 1 1
## 7867 67 66 1 1
## 7868 68 66 1 1
## 7869 69 66 1 1
## 7870 70 66 1 1
## 7871 71 66 1 1
## 7872 72 66 1 1
## 7873 73 66 1 1
## 7874 74 66 1 1
## 7875 75 66 1 1
## 7876 76 66 1 1
## 7877 77 66 1 1
## 7878 78 66 1 1
## 7879 79 66 1 1
## 7880 80 66 1 1
## 7881 81 66 1 1
## 7882 82 66 1 1
## 7883 83 66 1 1
## 7884 84 66 1 1
## 7885 85 66 1 1
## 7886 86 66 1 1
## 7887 87 66 1 1
## 7888 88 66 1 1
## 7889 89 66 1 1
## 7890 90 66 1 1
## 7891 91 66 1 1
## 7892 92 66 1 1
## 7893 93 66 1 1
## 7894 94 66 1 1
## 7895 95 66 1 1
## 7896 96 66 1 1
## 7897 97 66 1 1
## 7898 98 66 1 1
## 7899 99 66 1 1
## 7900 100 66 1 1
## 7901 101 66 1 1
## 7902 102 66 1 1
## 7903 103 66 1 1
## 7904 104 66 1 1
## 7905 105 66 1 1
## 7906 106 66 1 1
## 7907 107 66 1 1
## 7908 108 66 1 1
## 7909 109 66 1 1
## 7910 110 66 1 1
## 7911 111 66 1 1
## 7912 112 66 1 1
## 7913 113 66 1 1
## 7914 114 66 1 1
## 7915 115 66 1 1
## 7916 116 66 1 1
## 7917 117 66 1 1
## 7918 118 66 1 1
## 7919 119 66 1 1
## 7920 120 66 1 1
## 7921 1 67 1 1
## 7922 2 67 1 1
## 7923 3 67 1 1
## 7924 4 67 1 1
## 7925 5 67 1 1
## 7926 6 67 1 1
## 7927 7 67 1 1
## 7928 8 67 1 1
## 7929 9 67 1 1
## 7930 10 67 1 1
## 7931 11 67 1 1
## 7932 12 67 1 1
## 7933 13 67 1 1
## 7934 14 67 1 1
## 7935 15 67 1 1
## 7936 16 67 1 1
## 7937 17 67 1 1
## 7938 18 67 1 1
## 7939 19 67 1 1
## 7940 20 67 1 1
## 7941 21 67 1 1
## 7942 22 67 1 1
## 7943 23 67 1 1
## 7944 24 67 1 1
## 7945 25 67 1 1
## 7946 26 67 1 1
## 7947 27 67 1 1
## 7948 28 67 1 1
## 7949 29 67 1 1
## 7950 30 67 1 1
## 7951 31 67 1 1
## 7952 32 67 1 1
## 7953 33 67 1 1
## 7954 34 67 1 1
## 7955 35 67 1 1
## 7956 36 67 1 1
## 7957 37 67 1 1
## 7958 38 67 1 1
## 7959 39 67 1 1
## 7960 40 67 1 1
## 7961 41 67 1 1
## 7962 42 67 1 1
## 7963 43 67 1 1
## 7964 44 67 1 1
## 7965 45 67 1 1
## 7966 46 67 1 1
## 7967 47 67 1 1
## 7968 48 67 1 1
## 7969 49 67 1 1
## 7970 50 67 1 1
## 7971 51 67 1 1
## 7972 52 67 1 1
## 7973 53 67 1 1
## 7974 54 67 1 1
## 7975 55 67 1 1
## 7976 56 67 1 1
## 7977 57 67 1 1
## 7978 58 67 1 1
## 7979 59 67 1 1
## 7980 60 67 1 1
## 7981 61 67 1 1
## 7982 62 67 1 1
## 7983 63 67 1 1
## 7984 64 67 1 1
## 7985 65 67 1 1
## 7986 66 67 1 1
## 7987 67 67 1 1
## 7988 68 67 1 1
## 7989 69 67 1 1
## 7990 70 67 1 1
## 7991 71 67 1 1
## 7992 72 67 1 1
## 7993 73 67 1 1
## 7994 74 67 1 1
## 7995 75 67 1 1
## 7996 76 67 1 1
## 7997 77 67 1 1
## 7998 78 67 1 1
## 7999 79 67 1 1
## 8000 80 67 1 1
## 8001 81 67 1 1
## 8002 82 67 1 1
## 8003 83 67 1 1
## 8004 84 67 1 1
## 8005 85 67 1 1
## 8006 86 67 1 1
## 8007 87 67 1 1
## 8008 88 67 1 1
## 8009 89 67 1 1
## 8010 90 67 1 1
## 8011 91 67 1 1
## 8012 92 67 1 1
## 8013 93 67 1 1
## 8014 94 67 1 1
## 8015 95 67 1 1
## 8016 96 67 1 1
## 8017 97 67 1 1
## 8018 98 67 1 1
## 8019 99 67 1 1
## 8020 100 67 1 1
## 8021 101 67 1 1
## 8022 102 67 1 1
## 8023 103 67 1 1
## 8024 104 67 1 1
## 8025 105 67 1 1
## 8026 106 67 1 1
## 8027 107 67 1 1
## 8028 108 67 1 1
## 8029 109 67 1 1
## 8030 110 67 1 1
## 8031 111 67 1 1
## 8032 112 67 1 1
## 8033 113 67 1 1
## 8034 114 67 1 1
## 8035 115 67 1 1
## 8036 116 67 1 1
## 8037 117 67 1 1
## 8038 118 67 1 1
## 8039 119 67 1 1
## 8040 120 67 1 1
## 8041 1 68 1 1
## 8042 2 68 1 1
## 8043 3 68 1 1
## 8044 4 68 1 1
## 8045 5 68 1 1
## 8046 6 68 1 1
## 8047 7 68 1 1
## 8048 8 68 1 1
## 8049 9 68 1 1
## 8050 10 68 1 1
## 8051 11 68 1 1
## 8052 12 68 1 1
## 8053 13 68 1 1
## 8054 14 68 1 1
## 8055 15 68 1 1
## 8056 16 68 1 1
## 8057 17 68 1 1
## 8058 18 68 1 1
## 8059 19 68 1 1
## 8060 20 68 1 1
## 8061 21 68 1 1
## 8062 22 68 1 1
## 8063 23 68 1 1
## 8064 24 68 1 1
## 8065 25 68 1 1
## 8066 26 68 1 1
## 8067 27 68 1 1
## 8068 28 68 1 1
## 8069 29 68 1 1
## 8070 30 68 1 1
## 8071 31 68 1 1
## 8072 32 68 1 1
## 8073 33 68 1 1
## 8074 34 68 1 1
## 8075 35 68 1 1
## 8076 36 68 1 1
## 8077 37 68 1 1
## 8078 38 68 1 1
## 8079 39 68 1 1
## 8080 40 68 1 1
## 8081 41 68 1 1
## 8082 42 68 1 1
## 8083 43 68 1 1
## 8084 44 68 1 1
## 8085 45 68 1 1
## 8086 46 68 1 1
## 8087 47 68 1 1
## 8088 48 68 1 1
## 8089 49 68 1 1
## 8090 50 68 1 1
## 8091 51 68 1 1
## 8092 52 68 1 1
## 8093 53 68 1 1
## 8094 54 68 1 1
## 8095 55 68 1 1
## 8096 56 68 1 1
## 8097 57 68 1 1
## 8098 58 68 1 1
## 8099 59 68 1 1
## 8100 60 68 1 1
## 8101 61 68 1 1
## 8102 62 68 1 1
## 8103 63 68 1 1
## 8104 64 68 1 1
## 8105 65 68 1 1
## 8106 66 68 1 1
## 8107 67 68 1 1
## 8108 68 68 1 1
## 8109 69 68 1 1
## 8110 70 68 1 1
## 8111 71 68 1 1
## 8112 72 68 1 1
## 8113 73 68 1 1
## 8114 74 68 1 1
## 8115 75 68 1 1
## 8116 76 68 1 1
## 8117 77 68 1 1
## 8118 78 68 1 1
## 8119 79 68 1 1
## 8120 80 68 1 1
## 8121 81 68 1 1
## 8122 82 68 1 1
## 8123 83 68 1 1
## 8124 84 68 1 1
## 8125 85 68 1 1
## 8126 86 68 1 1
## 8127 87 68 1 1
## 8128 88 68 1 1
## 8129 89 68 1 1
## 8130 90 68 1 1
## 8131 91 68 1 1
## 8132 92 68 1 1
## 8133 93 68 1 1
## 8134 94 68 1 1
## 8135 95 68 1 1
## 8136 96 68 1 1
## 8137 97 68 1 1
## 8138 98 68 1 1
## 8139 99 68 1 1
## 8140 100 68 1 1
## 8141 101 68 1 1
## 8142 102 68 1 1
## 8143 103 68 1 1
## 8144 104 68 1 1
## 8145 105 68 1 1
## 8146 106 68 1 1
## 8147 107 68 1 1
## 8148 108 68 1 1
## 8149 109 68 1 1
## 8150 110 68 1 1
## 8151 111 68 1 1
## 8152 112 68 1 1
## 8153 113 68 1 1
## 8154 114 68 1 1
## 8155 115 68 1 1
## 8156 116 68 1 1
## 8157 117 68 1 1
## 8158 118 68 1 1
## 8159 119 68 1 1
## 8160 120 68 1 1
## 8161 1 69 1 1
## 8162 2 69 1 1
## 8163 3 69 1 1
## 8164 4 69 1 1
## 8165 5 69 1 1
## 8166 6 69 1 1
## 8167 7 69 1 1
## 8168 8 69 1 1
## 8169 9 69 1 1
## 8170 10 69 1 1
## 8171 11 69 1 1
## 8172 12 69 1 1
## 8173 13 69 1 1
## 8174 14 69 1 1
## 8175 15 69 1 1
## 8176 16 69 1 1
## 8177 17 69 1 1
## 8178 18 69 1 1
## 8179 19 69 1 1
## 8180 20 69 1 1
## 8181 21 69 1 1
## 8182 22 69 1 1
## 8183 23 69 1 1
## 8184 24 69 1 1
## 8185 25 69 1 1
## 8186 26 69 1 1
## 8187 27 69 1 1
## 8188 28 69 1 1
## 8189 29 69 1 1
## 8190 30 69 1 1
## 8191 31 69 1 1
## 8192 32 69 1 1
## 8193 33 69 1 1
## 8194 34 69 1 1
## 8195 35 69 1 1
## 8196 36 69 1 1
## 8197 37 69 1 1
## 8198 38 69 1 1
## 8199 39 69 1 1
## 8200 40 69 1 1
## 8201 41 69 1 1
## 8202 42 69 1 1
## 8203 43 69 1 1
## 8204 44 69 1 1
## 8205 45 69 1 1
## 8206 46 69 1 1
## 8207 47 69 1 1
## 8208 48 69 1 1
## 8209 49 69 1 1
## 8210 50 69 1 1
## 8211 51 69 1 1
## 8212 52 69 1 1
## 8213 53 69 1 1
## 8214 54 69 1 1
## 8215 55 69 1 1
## 8216 56 69 1 1
## 8217 57 69 1 1
## 8218 58 69 1 1
## 8219 59 69 1 1
## 8220 60 69 1 1
## 8221 61 69 1 1
## 8222 62 69 1 1
## 8223 63 69 1 1
## 8224 64 69 1 1
## 8225 65 69 1 1
## 8226 66 69 1 1
## 8227 67 69 1 1
## 8228 68 69 1 1
## 8229 69 69 1 1
## 8230 70 69 1 1
## 8231 71 69 1 1
## 8232 72 69 1 1
## 8233 73 69 1 1
## 8234 74 69 1 1
## 8235 75 69 1 1
## 8236 76 69 1 1
## 8237 77 69 1 1
## 8238 78 69 1 1
## 8239 79 69 1 1
## 8240 80 69 1 1
## 8241 81 69 1 1
## 8242 82 69 1 1
## 8243 83 69 1 1
## 8244 84 69 1 1
## 8245 85 69 1 1
## 8246 86 69 1 1
## 8247 87 69 1 1
## 8248 88 69 1 1
## 8249 89 69 1 1
## 8250 90 69 1 1
## 8251 91 69 1 1
## 8252 92 69 1 1
## 8253 93 69 1 1
## 8254 94 69 1 1
## 8255 95 69 1 1
## 8256 96 69 1 1
## 8257 97 69 1 1
## 8258 98 69 1 1
## 8259 99 69 1 1
## 8260 100 69 1 1
## 8261 101 69 1 1
## 8262 102 69 1 1
## 8263 103 69 1 1
## 8264 104 69 1 1
## 8265 105 69 1 1
## 8266 106 69 1 1
## 8267 107 69 1 1
## 8268 108 69 1 1
## 8269 109 69 1 1
## 8270 110 69 1 1
## 8271 111 69 1 1
## 8272 112 69 1 1
## 8273 113 69 1 1
## 8274 114 69 1 1
## 8275 115 69 1 1
## 8276 116 69 1 1
## 8277 117 69 1 1
## 8278 118 69 1 1
## 8279 119 69 1 1
## 8280 120 69 1 1
## 8281 1 70 1 1
## 8282 2 70 1 1
## 8283 3 70 1 1
## 8284 4 70 1 1
## 8285 5 70 1 1
## 8286 6 70 1 1
## 8287 7 70 1 1
## 8288 8 70 1 1
## 8289 9 70 1 1
## 8290 10 70 1 1
## 8291 11 70 1 1
## 8292 12 70 1 1
## 8293 13 70 1 1
## 8294 14 70 1 1
## 8295 15 70 1 1
## 8296 16 70 1 1
## 8297 17 70 1 1
## 8298 18 70 1 1
## 8299 19 70 1 1
## 8300 20 70 1 1
## 8301 21 70 1 1
## 8302 22 70 1 1
## 8303 23 70 1 1
## 8304 24 70 1 1
## 8305 25 70 1 1
## 8306 26 70 1 1
## 8307 27 70 1 1
## 8308 28 70 1 1
## 8309 29 70 1 1
## 8310 30 70 1 1
## 8311 31 70 1 1
## 8312 32 70 1 1
## 8313 33 70 1 1
## 8314 34 70 1 1
## 8315 35 70 1 1
## 8316 36 70 1 1
## 8317 37 70 1 1
## 8318 38 70 1 1
## 8319 39 70 1 1
## 8320 40 70 1 1
## 8321 41 70 1 1
## 8322 42 70 1 1
## 8323 43 70 1 1
## 8324 44 70 1 1
## 8325 45 70 1 1
## 8326 46 70 1 1
## 8327 47 70 1 1
## 8328 48 70 1 1
## 8329 49 70 1 1
## 8330 50 70 1 1
## 8331 51 70 1 1
## 8332 52 70 1 1
## 8333 53 70 1 1
## 8334 54 70 1 1
## 8335 55 70 1 1
## 8336 56 70 1 1
## 8337 57 70 1 1
## 8338 58 70 1 1
## 8339 59 70 1 1
## 8340 60 70 1 1
## 8341 61 70 1 1
## 8342 62 70 1 1
## 8343 63 70 1 1
## 8344 64 70 1 1
## 8345 65 70 1 1
## 8346 66 70 1 1
## 8347 67 70 1 1
## 8348 68 70 1 1
## 8349 69 70 1 1
## 8350 70 70 1 1
## 8351 71 70 1 1
## 8352 72 70 1 1
## 8353 73 70 1 1
## 8354 74 70 1 1
## 8355 75 70 1 1
## 8356 76 70 1 1
## 8357 77 70 1 1
## 8358 78 70 1 1
## 8359 79 70 1 1
## 8360 80 70 1 1
## 8361 81 70 1 1
## 8362 82 70 1 1
## 8363 83 70 1 1
## 8364 84 70 1 1
## 8365 85 70 1 1
## 8366 86 70 1 1
## 8367 87 70 1 1
## 8368 88 70 1 1
## 8369 89 70 1 1
## 8370 90 70 1 1
## 8371 91 70 1 1
## 8372 92 70 1 1
## 8373 93 70 1 1
## 8374 94 70 1 1
## 8375 95 70 1 1
## 8376 96 70 1 1
## 8377 97 70 1 1
## 8378 98 70 1 1
## 8379 99 70 1 1
## 8380 100 70 1 1
## 8381 101 70 1 1
## 8382 102 70 1 1
## 8383 103 70 1 1
## 8384 104 70 1 1
## 8385 105 70 1 1
## 8386 106 70 1 1
## 8387 107 70 1 1
## 8388 108 70 1 1
## 8389 109 70 1 1
## 8390 110 70 1 1
## 8391 111 70 1 1
## 8392 112 70 1 1
## 8393 113 70 1 1
## 8394 114 70 1 1
## 8395 115 70 1 1
## 8396 116 70 1 1
## 8397 117 70 1 1
## 8398 118 70 1 1
## 8399 119 70 1 1
## 8400 120 70 1 1
## 8401 1 71 1 1
## 8402 2 71 1 1
## 8403 3 71 1 1
## 8404 4 71 1 1
## 8405 5 71 1 1
## 8406 6 71 1 1
## 8407 7 71 1 1
## 8408 8 71 1 1
## 8409 9 71 1 1
## 8410 10 71 1 1
## 8411 11 71 1 1
## 8412 12 71 1 1
## 8413 13 71 1 1
## 8414 14 71 1 1
## 8415 15 71 1 1
## 8416 16 71 1 1
## 8417 17 71 1 1
## 8418 18 71 1 1
## 8419 19 71 1 1
## 8420 20 71 1 1
## 8421 21 71 1 1
## 8422 22 71 1 1
## 8423 23 71 1 1
## 8424 24 71 1 1
## 8425 25 71 1 1
## 8426 26 71 1 1
## 8427 27 71 1 1
## 8428 28 71 1 1
## 8429 29 71 1 1
## 8430 30 71 1 1
## 8431 31 71 1 1
## 8432 32 71 1 1
## 8433 33 71 1 1
## 8434 34 71 1 1
## 8435 35 71 1 1
## 8436 36 71 1 1
## 8437 37 71 1 1
## 8438 38 71 1 1
## 8439 39 71 1 1
## 8440 40 71 1 1
## 8441 41 71 1 1
## 8442 42 71 1 1
## 8443 43 71 1 1
## 8444 44 71 1 1
## 8445 45 71 1 1
## 8446 46 71 1 1
## 8447 47 71 1 1
## 8448 48 71 1 1
## 8449 49 71 1 1
## 8450 50 71 1 1
## 8451 51 71 1 1
## 8452 52 71 1 1
## 8453 53 71 1 1
## 8454 54 71 1 1
## 8455 55 71 1 1
## 8456 56 71 1 1
## 8457 57 71 1 1
## 8458 58 71 1 1
## 8459 59 71 1 1
## 8460 60 71 1 1
## 8461 61 71 1 1
## 8462 62 71 1 1
## 8463 63 71 1 1
## 8464 64 71 1 1
## 8465 65 71 1 1
## 8466 66 71 1 1
## 8467 67 71 1 1
## 8468 68 71 1 1
## 8469 69 71 1 1
## 8470 70 71 1 1
## 8471 71 71 1 1
## 8472 72 71 1 1
## 8473 73 71 1 1
## 8474 74 71 1 1
## 8475 75 71 1 1
## 8476 76 71 1 1
## 8477 77 71 1 1
## 8478 78 71 1 1
## 8479 79 71 1 1
## 8480 80 71 1 1
## 8481 81 71 1 1
## 8482 82 71 1 1
## 8483 83 71 1 1
## 8484 84 71 1 1
## 8485 85 71 1 1
## 8486 86 71 1 1
## 8487 87 71 1 1
## 8488 88 71 1 1
## 8489 89 71 1 1
## 8490 90 71 1 1
## 8491 91 71 1 1
## 8492 92 71 1 1
## 8493 93 71 1 1
## 8494 94 71 1 1
## 8495 95 71 1 1
## 8496 96 71 1 1
## 8497 97 71 1 1
## 8498 98 71 1 1
## 8499 99 71 1 1
## 8500 100 71 1 1
## 8501 101 71 1 1
## 8502 102 71 1 1
## 8503 103 71 1 1
## 8504 104 71 1 1
## 8505 105 71 1 1
## 8506 106 71 1 1
## 8507 107 71 1 1
## 8508 108 71 1 1
## 8509 109 71 1 1
## 8510 110 71 1 1
## 8511 111 71 1 1
## 8512 112 71 1 1
## 8513 113 71 1 1
## 8514 114 71 1 1
## 8515 115 71 1 1
## 8516 116 71 1 1
## 8517 117 71 1 1
## 8518 118 71 1 1
## 8519 119 71 1 1
## 8520 120 71 1 1
## 8521 1 72 1 1
## 8522 2 72 1 1
## 8523 3 72 1 1
## 8524 4 72 1 1
## 8525 5 72 1 1
## 8526 6 72 1 1
## 8527 7 72 1 1
## 8528 8 72 1 1
## 8529 9 72 1 1
## 8530 10 72 1 1
## 8531 11 72 1 1
## 8532 12 72 1 1
## 8533 13 72 1 1
## 8534 14 72 1 1
## 8535 15 72 1 1
## 8536 16 72 1 1
## 8537 17 72 1 1
## 8538 18 72 1 1
## 8539 19 72 1 1
## 8540 20 72 1 1
## 8541 21 72 1 1
## 8542 22 72 1 1
## 8543 23 72 1 1
## 8544 24 72 1 1
## 8545 25 72 1 1
## 8546 26 72 1 1
## 8547 27 72 1 1
## 8548 28 72 1 1
## 8549 29 72 1 1
## 8550 30 72 1 1
## 8551 31 72 1 1
## 8552 32 72 1 1
## 8553 33 72 1 1
## 8554 34 72 1 1
## 8555 35 72 1 1
## 8556 36 72 1 1
## 8557 37 72 1 1
## 8558 38 72 1 1
## 8559 39 72 1 1
## 8560 40 72 1 1
## 8561 41 72 1 1
## 8562 42 72 1 1
## 8563 43 72 1 1
## 8564 44 72 1 1
## 8565 45 72 1 1
## 8566 46 72 1 1
## 8567 47 72 1 1
## 8568 48 72 1 1
## 8569 49 72 1 1
## 8570 50 72 1 1
## 8571 51 72 1 1
## 8572 52 72 1 1
## 8573 53 72 1 1
## 8574 54 72 1 1
## 8575 55 72 1 1
## 8576 56 72 1 1
## 8577 57 72 1 1
## 8578 58 72 1 1
## 8579 59 72 1 1
## 8580 60 72 1 1
## 8581 61 72 1 1
## 8582 62 72 1 1
## 8583 63 72 1 1
## 8584 64 72 1 1
## 8585 65 72 1 1
## 8586 66 72 1 1
## 8587 67 72 1 1
## 8588 68 72 1 1
## 8589 69 72 1 1
## 8590 70 72 1 1
## 8591 71 72 1 1
## 8592 72 72 1 1
## 8593 73 72 1 1
## 8594 74 72 1 1
## 8595 75 72 1 1
## 8596 76 72 1 1
## 8597 77 72 1 1
## 8598 78 72 1 1
## 8599 79 72 1 1
## 8600 80 72 1 1
## 8601 81 72 1 1
## 8602 82 72 1 1
## 8603 83 72 1 1
## 8604 84 72 1 1
## 8605 85 72 1 1
## 8606 86 72 1 1
## 8607 87 72 1 1
## 8608 88 72 1 1
## 8609 89 72 1 1
## 8610 90 72 1 1
## 8611 91 72 1 1
## 8612 92 72 1 1
## 8613 93 72 1 1
## 8614 94 72 1 1
## 8615 95 72 1 1
## 8616 96 72 1 1
## 8617 97 72 1 1
## 8618 98 72 1 1
## 8619 99 72 1 1
## 8620 100 72 1 1
## 8621 101 72 1 1
## 8622 102 72 1 1
## 8623 103 72 1 1
## 8624 104 72 1 1
## 8625 105 72 1 1
## 8626 106 72 1 1
## 8627 107 72 1 1
## 8628 108 72 1 1
## 8629 109 72 1 1
## 8630 110 72 1 1
## 8631 111 72 1 1
## 8632 112 72 1 1
## 8633 113 72 1 1
## 8634 114 72 1 1
## 8635 115 72 1 1
## 8636 116 72 1 1
## 8637 117 72 1 1
## 8638 118 72 1 1
## 8639 119 72 1 1
## 8640 120 72 1 1
## 8641 1 73 1 1
## 8642 2 73 1 1
## 8643 3 73 1 1
## 8644 4 73 1 1
## 8645 5 73 1 1
## 8646 6 73 1 1
## 8647 7 73 1 1
## 8648 8 73 1 1
## 8649 9 73 1 1
## 8650 10 73 1 1
## 8651 11 73 1 1
## 8652 12 73 1 1
## 8653 13 73 1 1
## 8654 14 73 1 1
## 8655 15 73 1 1
## 8656 16 73 1 1
## 8657 17 73 1 1
## 8658 18 73 1 1
## 8659 19 73 1 1
## 8660 20 73 1 1
## 8661 21 73 1 1
## 8662 22 73 1 1
## 8663 23 73 1 1
## 8664 24 73 1 1
## 8665 25 73 1 1
## 8666 26 73 1 1
## 8667 27 73 1 1
## 8668 28 73 1 1
## 8669 29 73 1 1
## 8670 30 73 1 1
## 8671 31 73 1 1
## 8672 32 73 1 1
## 8673 33 73 1 1
## 8674 34 73 1 1
## 8675 35 73 1 1
## 8676 36 73 1 1
## 8677 37 73 1 1
## 8678 38 73 1 1
## 8679 39 73 1 1
## 8680 40 73 1 1
## 8681 41 73 1 1
## 8682 42 73 1 1
## 8683 43 73 1 1
## 8684 44 73 1 1
## 8685 45 73 1 1
## 8686 46 73 1 1
## 8687 47 73 1 1
## 8688 48 73 1 1
## 8689 49 73 1 1
## 8690 50 73 1 1
## 8691 51 73 1 1
## 8692 52 73 1 1
## 8693 53 73 1 1
## 8694 54 73 1 1
## 8695 55 73 1 1
## 8696 56 73 1 1
## 8697 57 73 1 1
## 8698 58 73 1 1
## 8699 59 73 1 1
## 8700 60 73 1 1
## 8701 61 73 1 1
## 8702 62 73 1 1
## 8703 63 73 1 1
## 8704 64 73 1 1
## 8705 65 73 1 1
## 8706 66 73 1 1
## 8707 67 73 1 1
## 8708 68 73 1 1
## 8709 69 73 1 1
## 8710 70 73 1 1
## 8711 71 73 1 1
## 8712 72 73 1 1
## 8713 73 73 1 1
## 8714 74 73 1 1
## 8715 75 73 1 1
## 8716 76 73 1 1
## 8717 77 73 1 1
## 8718 78 73 1 1
## 8719 79 73 1 1
## 8720 80 73 1 1
## 8721 81 73 1 1
## 8722 82 73 1 1
## 8723 83 73 1 1
## 8724 84 73 1 1
## 8725 85 73 1 1
## 8726 86 73 1 1
## 8727 87 73 1 1
## 8728 88 73 1 1
## 8729 89 73 1 1
## 8730 90 73 1 1
## 8731 91 73 1 1
## 8732 92 73 1 1
## 8733 93 73 1 1
## 8734 94 73 1 1
## 8735 95 73 1 1
## 8736 96 73 1 1
## 8737 97 73 1 1
## 8738 98 73 1 1
## 8739 99 73 1 1
## 8740 100 73 1 1
## 8741 101 73 1 1
## 8742 102 73 1 1
## 8743 103 73 1 1
## 8744 104 73 1 1
## 8745 105 73 1 1
## 8746 106 73 1 1
## 8747 107 73 1 1
## 8748 108 73 1 1
## 8749 109 73 1 1
## 8750 110 73 1 1
## 8751 111 73 1 1
## 8752 112 73 1 1
## 8753 113 73 1 1
## 8754 114 73 1 1
## 8755 115 73 1 1
## 8756 116 73 1 1
## 8757 117 73 1 1
## 8758 118 73 1 1
## 8759 119 73 1 1
## 8760 120 73 1 1
## 8761 1 74 1 1
## 8762 2 74 1 1
## 8763 3 74 1 1
## 8764 4 74 1 1
## 8765 5 74 1 1
## 8766 6 74 1 1
## 8767 7 74 1 1
## 8768 8 74 1 1
## 8769 9 74 1 1
## 8770 10 74 1 1
## 8771 11 74 1 1
## 8772 12 74 1 1
## 8773 13 74 1 1
## 8774 14 74 1 1
## 8775 15 74 1 1
## 8776 16 74 1 1
## 8777 17 74 1 1
## 8778 18 74 1 1
## 8779 19 74 1 1
## 8780 20 74 1 1
## 8781 21 74 1 1
## 8782 22 74 1 1
## 8783 23 74 1 1
## 8784 24 74 1 1
## 8785 25 74 1 1
## 8786 26 74 1 1
## 8787 27 74 1 1
## 8788 28 74 1 1
## 8789 29 74 1 1
## 8790 30 74 1 1
## 8791 31 74 1 1
## 8792 32 74 1 1
## 8793 33 74 1 1
## 8794 34 74 1 1
## 8795 35 74 1 1
## 8796 36 74 1 1
## 8797 37 74 1 1
## 8798 38 74 1 1
## 8799 39 74 1 1
## 8800 40 74 1 1
## 8801 41 74 1 1
## 8802 42 74 1 1
## 8803 43 74 1 1
## 8804 44 74 1 1
## 8805 45 74 1 1
## 8806 46 74 1 1
## 8807 47 74 1 1
## 8808 48 74 1 1
## 8809 49 74 1 1
## 8810 50 74 1 1
## 8811 51 74 1 1
## 8812 52 74 1 1
## 8813 53 74 1 1
## 8814 54 74 1 1
## 8815 55 74 1 1
## 8816 56 74 1 1
## 8817 57 74 1 1
## 8818 58 74 1 1
## 8819 59 74 1 1
## 8820 60 74 1 1
## 8821 61 74 1 1
## 8822 62 74 1 1
## 8823 63 74 1 1
## 8824 64 74 1 1
## 8825 65 74 1 1
## 8826 66 74 1 1
## 8827 67 74 1 1
## 8828 68 74 1 1
## 8829 69 74 1 1
## 8830 70 74 1 1
## 8831 71 74 1 1
## 8832 72 74 1 1
## 8833 73 74 1 1
## 8834 74 74 1 1
## 8835 75 74 1 1
## 8836 76 74 1 1
## 8837 77 74 1 1
## 8838 78 74 1 1
## 8839 79 74 1 1
## 8840 80 74 1 1
## 8841 81 74 1 1
## 8842 82 74 1 1
## 8843 83 74 1 1
## 8844 84 74 1 1
## 8845 85 74 1 1
## 8846 86 74 1 1
## 8847 87 74 1 1
## 8848 88 74 1 1
## 8849 89 74 1 1
## 8850 90 74 1 1
## 8851 91 74 1 1
## 8852 92 74 1 1
## 8853 93 74 1 1
## 8854 94 74 1 1
## 8855 95 74 1 1
## 8856 96 74 1 1
## 8857 97 74 1 1
## 8858 98 74 1 1
## 8859 99 74 1 1
## 8860 100 74 1 1
## 8861 101 74 1 1
## 8862 102 74 1 1
## 8863 103 74 1 1
## 8864 104 74 1 1
## 8865 105 74 1 1
## 8866 106 74 1 1
## 8867 107 74 1 1
## 8868 108 74 1 1
## 8869 109 74 1 1
## 8870 110 74 1 1
## 8871 111 74 1 1
## 8872 112 74 1 1
## 8873 113 74 1 1
## 8874 114 74 1 1
## 8875 115 74 1 1
## 8876 116 74 1 1
## 8877 117 74 1 1
## 8878 118 74 1 1
## 8879 119 74 1 1
## 8880 120 74 1 1
## 8881 1 75 1 1
## 8882 2 75 1 1
## 8883 3 75 1 1
## 8884 4 75 1 1
## 8885 5 75 1 1
## 8886 6 75 1 1
## 8887 7 75 1 1
## 8888 8 75 1 1
## 8889 9 75 1 1
## 8890 10 75 1 1
## 8891 11 75 1 1
## 8892 12 75 1 1
## 8893 13 75 1 1
## 8894 14 75 1 1
## 8895 15 75 1 1
## 8896 16 75 1 1
## 8897 17 75 1 1
## 8898 18 75 1 1
## 8899 19 75 1 1
## 8900 20 75 1 1
## 8901 21 75 1 1
## 8902 22 75 1 1
## 8903 23 75 1 1
## 8904 24 75 1 1
## 8905 25 75 1 1
## 8906 26 75 1 1
## 8907 27 75 1 1
## 8908 28 75 1 1
## 8909 29 75 1 1
## 8910 30 75 1 1
## 8911 31 75 1 1
## 8912 32 75 1 1
## 8913 33 75 1 1
## 8914 34 75 1 1
## 8915 35 75 1 1
## 8916 36 75 1 1
## 8917 37 75 1 1
## 8918 38 75 1 1
## 8919 39 75 1 1
## 8920 40 75 1 1
## 8921 41 75 1 1
## 8922 42 75 1 1
## 8923 43 75 1 1
## 8924 44 75 1 1
## 8925 45 75 1 1
## 8926 46 75 1 1
## 8927 47 75 1 1
## 8928 48 75 1 1
## 8929 49 75 1 1
## 8930 50 75 1 1
## 8931 51 75 1 1
## 8932 52 75 1 1
## 8933 53 75 1 1
## 8934 54 75 1 1
## 8935 55 75 1 1
## 8936 56 75 1 1
## 8937 57 75 1 1
## 8938 58 75 1 1
## 8939 59 75 1 1
## 8940 60 75 1 1
## 8941 61 75 1 1
## 8942 62 75 1 1
## 8943 63 75 1 1
## 8944 64 75 1 1
## 8945 65 75 1 1
## 8946 66 75 1 1
## 8947 67 75 1 1
## 8948 68 75 1 1
## 8949 69 75 1 1
## 8950 70 75 1 1
## 8951 71 75 1 1
## 8952 72 75 1 1
## 8953 73 75 1 1
## 8954 74 75 1 1
## 8955 75 75 1 1
## 8956 76 75 1 1
## 8957 77 75 1 1
## 8958 78 75 1 1
## 8959 79 75 1 1
## 8960 80 75 1 1
## 8961 81 75 1 1
## 8962 82 75 1 1
## 8963 83 75 1 1
## 8964 84 75 1 1
## 8965 85 75 1 1
## 8966 86 75 1 1
## 8967 87 75 1 1
## 8968 88 75 1 1
## 8969 89 75 1 1
## 8970 90 75 1 1
## 8971 91 75 1 1
## 8972 92 75 1 1
## 8973 93 75 1 1
## 8974 94 75 1 1
## 8975 95 75 1 1
## 8976 96 75 1 1
## 8977 97 75 1 1
## 8978 98 75 1 1
## 8979 99 75 1 1
## 8980 100 75 1 1
## 8981 101 75 1 1
## 8982 102 75 1 1
## 8983 103 75 1 1
## 8984 104 75 1 1
## 8985 105 75 1 1
## 8986 106 75 1 1
## 8987 107 75 1 1
## 8988 108 75 1 1
## 8989 109 75 1 1
## 8990 110 75 1 1
## 8991 111 75 1 1
## 8992 112 75 1 1
## 8993 113 75 1 1
## 8994 114 75 1 1
## 8995 115 75 1 1
## 8996 116 75 1 1
## 8997 117 75 1 1
## 8998 118 75 1 1
## 8999 119 75 1 1
## 9000 120 75 1 1
## 9001 1 76 1 1
## 9002 2 76 1 1
## 9003 3 76 1 1
## 9004 4 76 1 1
## 9005 5 76 1 1
## 9006 6 76 1 1
## 9007 7 76 1 1
## 9008 8 76 1 1
## 9009 9 76 1 1
## 9010 10 76 1 1
## 9011 11 76 1 1
## 9012 12 76 1 1
## 9013 13 76 1 1
## 9014 14 76 1 1
## 9015 15 76 1 1
## 9016 16 76 1 1
## 9017 17 76 1 1
## 9018 18 76 1 1
## 9019 19 76 1 1
## 9020 20 76 1 1
## 9021 21 76 1 1
## 9022 22 76 1 1
## 9023 23 76 1 1
## 9024 24 76 1 1
## 9025 25 76 1 1
## 9026 26 76 1 1
## 9027 27 76 1 1
## 9028 28 76 1 1
## 9029 29 76 1 1
## 9030 30 76 1 1
## 9031 31 76 1 1
## 9032 32 76 1 1
## 9033 33 76 1 1
## 9034 34 76 1 1
## 9035 35 76 1 1
## 9036 36 76 1 1
## 9037 37 76 1 1
## 9038 38 76 1 1
## 9039 39 76 1 1
## 9040 40 76 1 1
## 9041 41 76 1 1
## 9042 42 76 1 1
## 9043 43 76 1 1
## 9044 44 76 1 1
## 9045 45 76 1 1
## 9046 46 76 1 1
## 9047 47 76 1 1
## 9048 48 76 1 1
## 9049 49 76 1 1
## 9050 50 76 1 1
## 9051 51 76 1 1
## 9052 52 76 1 1
## 9053 53 76 1 1
## 9054 54 76 1 1
## 9055 55 76 1 1
## 9056 56 76 1 1
## 9057 57 76 1 1
## 9058 58 76 1 1
## 9059 59 76 1 1
## 9060 60 76 1 1
## 9061 61 76 1 1
## 9062 62 76 1 1
## 9063 63 76 1 1
## 9064 64 76 1 1
## 9065 65 76 1 1
## 9066 66 76 1 1
## 9067 67 76 1 1
## 9068 68 76 1 1
## 9069 69 76 1 1
## 9070 70 76 1 1
## 9071 71 76 1 1
## 9072 72 76 1 1
## 9073 73 76 1 1
## 9074 74 76 1 1
## 9075 75 76 1 1
## 9076 76 76 1 1
## 9077 77 76 1 1
## 9078 78 76 1 1
## 9079 79 76 1 1
## 9080 80 76 1 1
## 9081 81 76 1 1
## 9082 82 76 1 1
## 9083 83 76 1 1
## 9084 84 76 1 1
## 9085 85 76 1 1
## 9086 86 76 1 1
## 9087 87 76 1 1
## 9088 88 76 1 1
## 9089 89 76 1 1
## 9090 90 76 1 1
## 9091 91 76 1 1
## 9092 92 76 1 1
## 9093 93 76 1 1
## 9094 94 76 1 1
## 9095 95 76 1 1
## 9096 96 76 1 1
## 9097 97 76 1 1
## 9098 98 76 1 1
## 9099 99 76 1 1
## 9100 100 76 1 1
## 9101 101 76 1 1
## 9102 102 76 1 1
## 9103 103 76 1 1
## 9104 104 76 1 1
## 9105 105 76 1 1
## 9106 106 76 1 1
## 9107 107 76 1 1
## 9108 108 76 1 1
## 9109 109 76 1 1
## 9110 110 76 1 1
## 9111 111 76 1 1
## 9112 112 76 1 1
## 9113 113 76 1 1
## 9114 114 76 1 1
## 9115 115 76 1 1
## 9116 116 76 1 1
## 9117 117 76 1 1
## 9118 118 76 1 1
## 9119 119 76 1 1
## 9120 120 76 1 1
## 9121 1 77 1 1
## 9122 2 77 1 1
## 9123 3 77 1 1
## 9124 4 77 1 1
## 9125 5 77 1 1
## 9126 6 77 1 1
## 9127 7 77 1 1
## 9128 8 77 1 1
## 9129 9 77 1 1
## 9130 10 77 1 1
## 9131 11 77 1 1
## 9132 12 77 1 1
## 9133 13 77 1 1
## 9134 14 77 1 1
## 9135 15 77 1 1
## 9136 16 77 1 1
## 9137 17 77 1 1
## 9138 18 77 1 1
## 9139 19 77 1 1
## 9140 20 77 1 1
## 9141 21 77 1 1
## 9142 22 77 1 1
## 9143 23 77 1 1
## 9144 24 77 1 1
## 9145 25 77 1 1
## 9146 26 77 1 1
## 9147 27 77 1 1
## 9148 28 77 1 1
## 9149 29 77 1 1
## 9150 30 77 1 1
## 9151 31 77 1 1
## 9152 32 77 1 1
## 9153 33 77 1 1
## 9154 34 77 1 1
## 9155 35 77 1 1
## 9156 36 77 1 1
## 9157 37 77 1 1
## 9158 38 77 1 1
## 9159 39 77 1 1
## 9160 40 77 1 1
## 9161 41 77 1 1
## 9162 42 77 1 1
## 9163 43 77 1 1
## 9164 44 77 1 1
## 9165 45 77 1 1
## 9166 46 77 1 1
## 9167 47 77 1 1
## 9168 48 77 1 1
## 9169 49 77 1 1
## 9170 50 77 1 1
## 9171 51 77 1 1
## 9172 52 77 1 1
## 9173 53 77 1 1
## 9174 54 77 1 1
## 9175 55 77 1 1
## 9176 56 77 1 1
## 9177 57 77 1 1
## 9178 58 77 1 1
## 9179 59 77 1 1
## 9180 60 77 1 1
## 9181 61 77 1 1
## 9182 62 77 1 1
## 9183 63 77 1 1
## 9184 64 77 1 1
## 9185 65 77 1 1
## 9186 66 77 1 1
## 9187 67 77 1 1
## 9188 68 77 1 1
## 9189 69 77 1 1
## 9190 70 77 1 1
## 9191 71 77 1 1
## 9192 72 77 1 1
## 9193 73 77 1 1
## 9194 74 77 1 1
## 9195 75 77 1 1
## 9196 76 77 1 1
## 9197 77 77 1 1
## 9198 78 77 1 1
## 9199 79 77 1 1
## 9200 80 77 1 1
## 9201 81 77 1 1
## 9202 82 77 1 1
## 9203 83 77 1 1
## 9204 84 77 1 1
## 9205 85 77 1 1
## 9206 86 77 1 1
## 9207 87 77 1 1
## 9208 88 77 1 1
## 9209 89 77 1 1
## 9210 90 77 1 1
## 9211 91 77 1 1
## 9212 92 77 1 1
## 9213 93 77 1 1
## 9214 94 77 1 1
## 9215 95 77 1 1
## 9216 96 77 1 1
## 9217 97 77 1 1
## 9218 98 77 1 1
## 9219 99 77 1 1
## 9220 100 77 1 1
## 9221 101 77 1 1
## 9222 102 77 1 1
## 9223 103 77 1 1
## 9224 104 77 1 1
## 9225 105 77 1 1
## 9226 106 77 1 1
## 9227 107 77 1 1
## 9228 108 77 1 1
## 9229 109 77 1 1
## 9230 110 77 1 1
## 9231 111 77 1 1
## 9232 112 77 1 1
## 9233 113 77 1 1
## 9234 114 77 1 1
## 9235 115 77 1 1
## 9236 116 77 1 1
## 9237 117 77 1 1
## 9238 118 77 1 1
## 9239 119 77 1 1
## 9240 120 77 1 1
## 9241 1 78 1 1
## 9242 2 78 1 1
## 9243 3 78 1 1
## 9244 4 78 1 1
## 9245 5 78 1 1
## 9246 6 78 1 1
## 9247 7 78 1 1
## 9248 8 78 1 1
## 9249 9 78 1 1
## 9250 10 78 1 1
## 9251 11 78 1 1
## 9252 12 78 1 1
## 9253 13 78 1 1
## 9254 14 78 1 1
## 9255 15 78 1 1
## 9256 16 78 1 1
## 9257 17 78 1 1
## 9258 18 78 1 1
## 9259 19 78 1 1
## 9260 20 78 1 1
## 9261 21 78 1 1
## 9262 22 78 1 1
## 9263 23 78 1 1
## 9264 24 78 1 1
## 9265 25 78 1 1
## 9266 26 78 1 1
## 9267 27 78 1 1
## 9268 28 78 1 1
## 9269 29 78 1 1
## 9270 30 78 1 1
## 9271 31 78 1 1
## 9272 32 78 1 1
## 9273 33 78 1 1
## 9274 34 78 1 1
## 9275 35 78 1 1
## 9276 36 78 1 1
## 9277 37 78 1 1
## 9278 38 78 1 1
## 9279 39 78 1 1
## 9280 40 78 1 1
## 9281 41 78 1 1
## 9282 42 78 1 1
## 9283 43 78 1 1
## 9284 44 78 1 1
## 9285 45 78 1 1
## 9286 46 78 1 1
## 9287 47 78 1 1
## 9288 48 78 1 1
## 9289 49 78 1 1
## 9290 50 78 1 1
## 9291 51 78 1 1
## 9292 52 78 1 1
## 9293 53 78 1 1
## 9294 54 78 1 1
## 9295 55 78 1 1
## 9296 56 78 1 1
## 9297 57 78 1 1
## 9298 58 78 1 1
## 9299 59 78 1 1
## 9300 60 78 1 1
## 9301 61 78 1 1
## 9302 62 78 1 1
## 9303 63 78 1 1
## 9304 64 78 1 1
## 9305 65 78 1 1
## 9306 66 78 1 1
## 9307 67 78 1 1
## 9308 68 78 1 1
## 9309 69 78 1 1
## 9310 70 78 1 1
## 9311 71 78 1 1
## 9312 72 78 1 1
## 9313 73 78 1 1
## 9314 74 78 1 1
## 9315 75 78 1 1
## 9316 76 78 1 1
## 9317 77 78 1 1
## 9318 78 78 1 1
## 9319 79 78 1 1
## 9320 80 78 1 1
## 9321 81 78 1 1
## 9322 82 78 1 1
## 9323 83 78 1 1
## 9324 84 78 1 1
## 9325 85 78 1 1
## 9326 86 78 1 1
## 9327 87 78 1 1
## 9328 88 78 1 1
## 9329 89 78 1 1
## 9330 90 78 1 1
## 9331 91 78 1 1
## 9332 92 78 1 1
## 9333 93 78 1 1
## 9334 94 78 1 1
## 9335 95 78 1 1
## 9336 96 78 1 1
## 9337 97 78 1 1
## 9338 98 78 1 1
## 9339 99 78 1 1
## 9340 100 78 1 1
## 9341 101 78 1 1
## 9342 102 78 1 1
## 9343 103 78 1 1
## 9344 104 78 1 1
## 9345 105 78 1 1
## 9346 106 78 1 1
## 9347 107 78 1 1
## 9348 108 78 1 1
## 9349 109 78 1 1
## 9350 110 78 1 1
## 9351 111 78 1 1
## 9352 112 78 1 1
## 9353 113 78 1 1
## 9354 114 78 1 1
## 9355 115 78 1 1
## 9356 116 78 1 1
## 9357 117 78 1 1
## 9358 118 78 1 1
## 9359 119 78 1 1
## 9360 120 78 1 1
## 9361 1 79 1 1
## 9362 2 79 1 1
## 9363 3 79 1 1
## 9364 4 79 1 1
## 9365 5 79 1 1
## 9366 6 79 1 1
## 9367 7 79 1 1
## 9368 8 79 1 1
## 9369 9 79 1 1
## 9370 10 79 1 1
## 9371 11 79 1 1
## 9372 12 79 1 1
## 9373 13 79 1 1
## 9374 14 79 1 1
## 9375 15 79 1 1
## 9376 16 79 1 1
## 9377 17 79 1 1
## 9378 18 79 1 1
## 9379 19 79 1 1
## 9380 20 79 1 1
## 9381 21 79 1 1
## 9382 22 79 1 1
## 9383 23 79 1 1
## 9384 24 79 1 1
## 9385 25 79 1 1
## 9386 26 79 1 1
## 9387 27 79 1 1
## 9388 28 79 1 1
## 9389 29 79 1 1
## 9390 30 79 1 1
## 9391 31 79 1 1
## 9392 32 79 1 1
## 9393 33 79 1 1
## 9394 34 79 1 1
## 9395 35 79 1 1
## 9396 36 79 1 1
## 9397 37 79 1 1
## 9398 38 79 1 1
## 9399 39 79 1 1
## 9400 40 79 1 1
## 9401 41 79 1 1
## 9402 42 79 1 1
## 9403 43 79 1 1
## 9404 44 79 1 1
## 9405 45 79 1 1
## 9406 46 79 1 1
## 9407 47 79 1 1
## 9408 48 79 1 1
## 9409 49 79 1 1
## 9410 50 79 1 1
## 9411 51 79 1 1
## 9412 52 79 1 1
## 9413 53 79 1 1
## 9414 54 79 1 1
## 9415 55 79 1 1
## 9416 56 79 1 1
## 9417 57 79 1 1
## 9418 58 79 1 1
## 9419 59 79 1 1
## 9420 60 79 1 1
## 9421 61 79 1 1
## 9422 62 79 1 1
## 9423 63 79 1 1
## 9424 64 79 1 1
## 9425 65 79 1 1
## 9426 66 79 1 1
## 9427 67 79 1 1
## 9428 68 79 1 1
## 9429 69 79 1 1
## 9430 70 79 1 1
## 9431 71 79 1 1
## 9432 72 79 1 1
## 9433 73 79 1 1
## 9434 74 79 1 1
## 9435 75 79 1 1
## 9436 76 79 1 1
## 9437 77 79 1 1
## 9438 78 79 1 1
## 9439 79 79 1 1
## 9440 80 79 1 1
## 9441 81 79 1 1
## 9442 82 79 1 1
## 9443 83 79 1 1
## 9444 84 79 1 1
## 9445 85 79 1 1
## 9446 86 79 1 1
## 9447 87 79 1 1
## 9448 88 79 1 1
## 9449 89 79 1 1
## 9450 90 79 1 1
## 9451 91 79 1 1
## 9452 92 79 1 1
## 9453 93 79 1 1
## 9454 94 79 1 1
## 9455 95 79 1 1
## 9456 96 79 1 1
## 9457 97 79 1 1
## 9458 98 79 1 1
## 9459 99 79 1 1
## 9460 100 79 1 1
## 9461 101 79 1 1
## 9462 102 79 1 1
## 9463 103 79 1 1
## 9464 104 79 1 1
## 9465 105 79 1 1
## 9466 106 79 1 1
## 9467 107 79 1 1
## 9468 108 79 1 1
## 9469 109 79 1 1
## 9470 110 79 1 1
## 9471 111 79 1 1
## 9472 112 79 1 1
## 9473 113 79 1 1
## 9474 114 79 1 1
## 9475 115 79 1 1
## 9476 116 79 1 1
## 9477 117 79 1 1
## 9478 118 79 1 1
## 9479 119 79 1 1
## 9480 120 79 1 1
## 9481 1 80 1 1
## 9482 2 80 1 1
## 9483 3 80 1 1
## 9484 4 80 1 1
## 9485 5 80 1 1
## 9486 6 80 1 1
## 9487 7 80 1 1
## 9488 8 80 1 1
## 9489 9 80 1 1
## 9490 10 80 1 1
## 9491 11 80 1 1
## 9492 12 80 1 1
## 9493 13 80 1 1
## 9494 14 80 1 1
## 9495 15 80 1 1
## 9496 16 80 1 1
## 9497 17 80 1 1
## 9498 18 80 1 1
## 9499 19 80 1 1
## 9500 20 80 1 1
## 9501 21 80 1 1
## 9502 22 80 1 1
## 9503 23 80 1 1
## 9504 24 80 1 1
## 9505 25 80 1 1
## 9506 26 80 1 1
## 9507 27 80 1 1
## 9508 28 80 1 1
## 9509 29 80 1 1
## 9510 30 80 1 1
## 9511 31 80 1 1
## 9512 32 80 1 1
## 9513 33 80 1 1
## 9514 34 80 1 1
## 9515 35 80 1 1
## 9516 36 80 1 1
## 9517 37 80 1 1
## 9518 38 80 1 1
## 9519 39 80 1 1
## 9520 40 80 1 1
## 9521 41 80 1 1
## 9522 42 80 1 1
## 9523 43 80 1 1
## 9524 44 80 1 1
## 9525 45 80 1 1
## 9526 46 80 1 1
## 9527 47 80 1 1
## 9528 48 80 1 1
## 9529 49 80 1 1
## 9530 50 80 1 1
## 9531 51 80 1 1
## 9532 52 80 1 1
## 9533 53 80 1 1
## 9534 54 80 1 1
## 9535 55 80 1 1
## 9536 56 80 1 1
## 9537 57 80 1 1
## 9538 58 80 1 1
## 9539 59 80 1 1
## 9540 60 80 1 1
## 9541 61 80 1 1
## 9542 62 80 1 1
## 9543 63 80 1 1
## 9544 64 80 1 1
## 9545 65 80 1 1
## 9546 66 80 1 1
## 9547 67 80 1 1
## 9548 68 80 1 1
## 9549 69 80 1 1
## 9550 70 80 1 1
## 9551 71 80 1 1
## 9552 72 80 1 1
## 9553 73 80 1 1
## 9554 74 80 1 1
## 9555 75 80 1 1
## 9556 76 80 1 1
## 9557 77 80 1 1
## 9558 78 80 1 1
## 9559 79 80 1 1
## 9560 80 80 1 1
## 9561 81 80 1 1
## 9562 82 80 1 1
## 9563 83 80 1 1
## 9564 84 80 1 1
## 9565 85 80 1 1
## 9566 86 80 1 1
## 9567 87 80 1 1
## 9568 88 80 1 1
## 9569 89 80 1 1
## 9570 90 80 1 1
## 9571 91 80 1 1
## 9572 92 80 1 1
## 9573 93 80 1 1
## 9574 94 80 1 1
## 9575 95 80 1 1
## 9576 96 80 1 1
## 9577 97 80 1 1
## 9578 98 80 1 1
## 9579 99 80 1 1
## 9580 100 80 1 1
## 9581 101 80 1 1
## 9582 102 80 1 1
## 9583 103 80 1 1
## 9584 104 80 1 1
## 9585 105 80 1 1
## 9586 106 80 1 1
## 9587 107 80 1 1
## 9588 108 80 1 1
## 9589 109 80 1 1
## 9590 110 80 1 1
## 9591 111 80 1 1
## 9592 112 80 1 1
## 9593 113 80 1 1
## 9594 114 80 1 1
## 9595 115 80 1 1
## 9596 116 80 1 1
## 9597 117 80 1 1
## 9598 118 80 1 1
## 9599 119 80 1 1
## 9600 120 80 1 1
## 9601 1 81 1 1
## 9602 2 81 1 1
## 9603 3 81 1 1
## 9604 4 81 1 1
## 9605 5 81 1 1
## 9606 6 81 1 1
## 9607 7 81 1 1
## 9608 8 81 1 1
## 9609 9 81 1 1
## 9610 10 81 1 1
## 9611 11 81 1 1
## 9612 12 81 1 1
## 9613 13 81 1 1
## 9614 14 81 1 1
## 9615 15 81 1 1
## 9616 16 81 1 1
## 9617 17 81 1 1
## 9618 18 81 1 1
## 9619 19 81 1 1
## 9620 20 81 1 1
## 9621 21 81 1 1
## 9622 22 81 1 1
## 9623 23 81 1 1
## 9624 24 81 1 1
## 9625 25 81 1 1
## 9626 26 81 1 1
## 9627 27 81 1 1
## 9628 28 81 1 1
## 9629 29 81 1 1
## 9630 30 81 1 1
## 9631 31 81 1 1
## 9632 32 81 1 1
## 9633 33 81 1 1
## 9634 34 81 1 1
## 9635 35 81 1 1
## 9636 36 81 1 1
## 9637 37 81 1 1
## 9638 38 81 1 1
## 9639 39 81 1 1
## 9640 40 81 1 1
## 9641 41 81 1 1
## 9642 42 81 1 1
## 9643 43 81 1 1
## 9644 44 81 1 1
## 9645 45 81 1 1
## 9646 46 81 1 1
## 9647 47 81 1 1
## 9648 48 81 1 1
## 9649 49 81 1 1
## 9650 50 81 1 1
## 9651 51 81 1 1
## 9652 52 81 1 1
## 9653 53 81 1 1
## 9654 54 81 1 1
## 9655 55 81 1 1
## 9656 56 81 1 1
## 9657 57 81 1 1
## 9658 58 81 1 1
## 9659 59 81 1 1
## 9660 60 81 1 1
## 9661 61 81 1 1
## 9662 62 81 1 1
## 9663 63 81 1 1
## 9664 64 81 1 1
## 9665 65 81 1 1
## 9666 66 81 1 1
## 9667 67 81 1 1
## 9668 68 81 1 1
## 9669 69 81 1 1
## 9670 70 81 1 1
## 9671 71 81 1 1
## 9672 72 81 1 1
## 9673 73 81 1 1
## 9674 74 81 1 1
## 9675 75 81 1 1
## 9676 76 81 1 1
## 9677 77 81 1 1
## 9678 78 81 1 1
## 9679 79 81 1 1
## 9680 80 81 1 1
## 9681 81 81 1 1
## 9682 82 81 1 1
## 9683 83 81 1 1
## 9684 84 81 1 1
## 9685 85 81 1 1
## 9686 86 81 1 1
## 9687 87 81 1 1
## 9688 88 81 1 1
## 9689 89 81 1 1
## 9690 90 81 1 1
## 9691 91 81 1 1
## 9692 92 81 1 1
## 9693 93 81 1 1
## 9694 94 81 1 1
## 9695 95 81 1 1
## 9696 96 81 1 1
## 9697 97 81 1 1
## 9698 98 81 1 1
## 9699 99 81 1 1
## 9700 100 81 1 1
## 9701 101 81 1 1
## 9702 102 81 1 1
## 9703 103 81 1 1
## 9704 104 81 1 1
## 9705 105 81 1 1
## 9706 106 81 1 1
## 9707 107 81 1 1
## 9708 108 81 1 1
## 9709 109 81 1 1
## 9710 110 81 1 1
## 9711 111 81 1 1
## 9712 112 81 1 1
## 9713 113 81 1 1
## 9714 114 81 1 1
## 9715 115 81 1 1
## 9716 116 81 1 1
## 9717 117 81 1 1
## 9718 118 81 1 1
## 9719 119 81 1 1
## 9720 120 81 1 1
## 9721 1 82 1 1
## 9722 2 82 1 1
## 9723 3 82 1 1
## 9724 4 82 1 1
## 9725 5 82 1 1
## 9726 6 82 1 1
## 9727 7 82 1 1
## 9728 8 82 1 1
## 9729 9 82 1 1
## 9730 10 82 1 1
## 9731 11 82 1 1
## 9732 12 82 1 1
## 9733 13 82 1 1
## 9734 14 82 1 1
## 9735 15 82 1 1
## 9736 16 82 1 1
## 9737 17 82 1 1
## 9738 18 82 1 1
## 9739 19 82 1 1
## 9740 20 82 1 1
## 9741 21 82 1 1
## 9742 22 82 1 1
## 9743 23 82 1 1
## 9744 24 82 1 1
## 9745 25 82 1 1
## 9746 26 82 1 1
## 9747 27 82 1 1
## 9748 28 82 1 1
## 9749 29 82 1 1
## 9750 30 82 1 1
## 9751 31 82 1 1
## 9752 32 82 1 1
## 9753 33 82 1 1
## 9754 34 82 1 1
## 9755 35 82 1 1
## 9756 36 82 1 1
## 9757 37 82 1 1
## 9758 38 82 1 1
## 9759 39 82 1 1
## 9760 40 82 1 1
## 9761 41 82 1 1
## 9762 42 82 1 1
## 9763 43 82 1 1
## 9764 44 82 1 1
## 9765 45 82 1 1
## 9766 46 82 1 1
## 9767 47 82 1 1
## 9768 48 82 1 1
## 9769 49 82 1 1
## 9770 50 82 1 1
## 9771 51 82 1 1
## 9772 52 82 1 1
## 9773 53 82 1 1
## 9774 54 82 1 1
## 9775 55 82 1 1
## 9776 56 82 1 1
## 9777 57 82 1 1
## 9778 58 82 1 1
## 9779 59 82 1 1
## 9780 60 82 1 1
## 9781 61 82 1 1
## 9782 62 82 1 1
## 9783 63 82 1 1
## 9784 64 82 1 1
## 9785 65 82 1 1
## 9786 66 82 1 1
## 9787 67 82 1 1
## 9788 68 82 1 1
## 9789 69 82 1 1
## 9790 70 82 1 1
## 9791 71 82 1 1
## 9792 72 82 1 1
## 9793 73 82 1 1
## 9794 74 82 1 1
## 9795 75 82 1 1
## 9796 76 82 1 1
## 9797 77 82 1 1
## 9798 78 82 1 1
## 9799 79 82 1 1
## 9800 80 82 1 1
## 9801 81 82 1 1
## 9802 82 82 1 1
## 9803 83 82 1 1
## 9804 84 82 1 1
## 9805 85 82 1 1
## 9806 86 82 1 1
## 9807 87 82 1 1
## 9808 88 82 1 1
## 9809 89 82 1 1
## 9810 90 82 1 1
## 9811 91 82 1 1
## 9812 92 82 1 1
## 9813 93 82 1 1
## 9814 94 82 1 1
## 9815 95 82 1 1
## 9816 96 82 1 1
## 9817 97 82 1 1
## 9818 98 82 1 1
## 9819 99 82 1 1
## 9820 100 82 1 1
## 9821 101 82 1 1
## 9822 102 82 1 1
## 9823 103 82 1 1
## 9824 104 82 1 1
## 9825 105 82 1 1
## 9826 106 82 1 1
## 9827 107 82 1 1
## 9828 108 82 1 1
## 9829 109 82 1 1
## 9830 110 82 1 1
## 9831 111 82 1 1
## 9832 112 82 1 1
## 9833 113 82 1 1
## 9834 114 82 1 1
## 9835 115 82 1 1
## 9836 116 82 1 1
## 9837 117 82 1 1
## 9838 118 82 1 1
## 9839 119 82 1 1
## 9840 120 82 1 1
## 9841 1 83 1 1
## 9842 2 83 1 1
## 9843 3 83 1 1
## 9844 4 83 1 1
## 9845 5 83 1 1
## 9846 6 83 1 1
## 9847 7 83 1 1
## 9848 8 83 1 1
## 9849 9 83 1 1
## 9850 10 83 1 1
## 9851 11 83 1 1
## 9852 12 83 1 1
## 9853 13 83 1 1
## 9854 14 83 1 1
## 9855 15 83 1 1
## 9856 16 83 1 1
## 9857 17 83 1 1
## 9858 18 83 1 1
## 9859 19 83 1 1
## 9860 20 83 1 1
## 9861 21 83 1 1
## 9862 22 83 1 1
## 9863 23 83 1 1
## 9864 24 83 1 1
## 9865 25 83 1 1
## 9866 26 83 1 1
## 9867 27 83 1 1
## 9868 28 83 1 1
## 9869 29 83 1 1
## 9870 30 83 1 1
## 9871 31 83 1 1
## 9872 32 83 1 1
## 9873 33 83 1 1
## 9874 34 83 1 1
## 9875 35 83 1 1
## 9876 36 83 1 1
## 9877 37 83 1 1
## 9878 38 83 1 1
## 9879 39 83 1 1
## 9880 40 83 1 1
## 9881 41 83 1 1
## 9882 42 83 1 1
## 9883 43 83 1 1
## 9884 44 83 1 1
## 9885 45 83 1 1
## 9886 46 83 1 1
## 9887 47 83 1 1
## 9888 48 83 1 1
## 9889 49 83 1 1
## 9890 50 83 1 1
## 9891 51 83 1 1
## 9892 52 83 1 1
## 9893 53 83 1 1
## 9894 54 83 1 1
## 9895 55 83 1 1
## 9896 56 83 1 1
## 9897 57 83 1 1
## 9898 58 83 1 1
## 9899 59 83 1 1
## 9900 60 83 1 1
## 9901 61 83 1 1
## 9902 62 83 1 1
## 9903 63 83 1 1
## 9904 64 83 1 1
## 9905 65 83 1 1
## 9906 66 83 1 1
## 9907 67 83 1 1
## 9908 68 83 1 1
## 9909 69 83 1 1
## 9910 70 83 1 1
## 9911 71 83 1 1
## 9912 72 83 1 1
## 9913 73 83 1 1
## 9914 74 83 1 1
## 9915 75 83 1 1
## 9916 76 83 1 1
## 9917 77 83 1 1
## 9918 78 83 1 1
## 9919 79 83 1 1
## 9920 80 83 1 1
## 9921 81 83 1 1
## 9922 82 83 1 1
## 9923 83 83 1 1
## 9924 84 83 1 1
## 9925 85 83 1 1
## 9926 86 83 1 1
## 9927 87 83 1 1
## 9928 88 83 1 1
## 9929 89 83 1 1
## 9930 90 83 1 1
## 9931 91 83 1 1
## 9932 92 83 1 1
## 9933 93 83 1 1
## 9934 94 83 1 1
## 9935 95 83 1 1
## 9936 96 83 1 1
## 9937 97 83 1 1
## 9938 98 83 1 1
## 9939 99 83 1 1
## 9940 100 83 1 1
## 9941 101 83 1 1
## 9942 102 83 1 1
## 9943 103 83 1 1
## 9944 104 83 1 1
## 9945 105 83 1 1
## 9946 106 83 1 1
## 9947 107 83 1 1
## 9948 108 83 1 1
## 9949 109 83 1 1
## 9950 110 83 1 1
## 9951 111 83 1 1
## 9952 112 83 1 1
## 9953 113 83 1 1
## 9954 114 83 1 1
## 9955 115 83 1 1
## 9956 116 83 1 1
## 9957 117 83 1 1
## 9958 118 83 1 1
## 9959 119 83 1 1
## 9960 120 83 1 1
## 9961 1 84 1 1
## 9962 2 84 1 1
## 9963 3 84 1 1
## 9964 4 84 1 1
## 9965 5 84 1 1
## 9966 6 84 1 1
## 9967 7 84 1 1
## 9968 8 84 1 1
## 9969 9 84 1 1
## 9970 10 84 1 1
## 9971 11 84 1 1
## 9972 12 84 1 1
## 9973 13 84 1 1
## 9974 14 84 1 1
## 9975 15 84 1 1
## 9976 16 84 1 1
## 9977 17 84 1 1
## 9978 18 84 1 1
## 9979 19 84 1 1
## 9980 20 84 1 1
## 9981 21 84 1 1
## 9982 22 84 1 1
## 9983 23 84 1 1
## 9984 24 84 1 1
## 9985 25 84 1 1
## 9986 26 84 1 1
## 9987 27 84 1 1
## 9988 28 84 1 1
## 9989 29 84 1 1
## 9990 30 84 1 1
## 9991 31 84 1 1
## 9992 32 84 1 1
## 9993 33 84 1 1
## 9994 34 84 1 1
## 9995 35 84 1 1
## 9996 36 84 1 1
## 9997 37 84 1 1
## 9998 38 84 1 1
## 9999 39 84 1 1
## 10000 40 84 1 1
## 10001 41 84 1 1
## 10002 42 84 1 1
## 10003 43 84 1 1
## 10004 44 84 1 1
## 10005 45 84 1 1
## 10006 46 84 1 1
## 10007 47 84 1 1
## 10008 48 84 1 1
## 10009 49 84 1 1
## 10010 50 84 1 1
## 10011 51 84 1 1
## 10012 52 84 1 1
## 10013 53 84 1 1
## 10014 54 84 1 1
## 10015 55 84 1 1
## 10016 56 84 1 1
## 10017 57 84 1 1
## 10018 58 84 1 1
## 10019 59 84 1 1
## 10020 60 84 1 1
## 10021 61 84 1 1
## 10022 62 84 1 1
## 10023 63 84 1 1
## 10024 64 84 1 1
## 10025 65 84 1 1
## 10026 66 84 1 1
## 10027 67 84 1 1
## 10028 68 84 1 1
## 10029 69 84 1 1
## 10030 70 84 1 1
## 10031 71 84 1 1
## 10032 72 84 1 1
## 10033 73 84 1 1
## 10034 74 84 1 1
## 10035 75 84 1 1
## 10036 76 84 1 1
## 10037 77 84 1 1
## 10038 78 84 1 1
## 10039 79 84 1 1
## 10040 80 84 1 1
## 10041 81 84 1 1
## 10042 82 84 1 1
## 10043 83 84 1 1
## 10044 84 84 1 1
## 10045 85 84 1 1
## 10046 86 84 1 1
## 10047 87 84 1 1
## 10048 88 84 1 1
## 10049 89 84 1 1
## 10050 90 84 1 1
## 10051 91 84 1 1
## 10052 92 84 1 1
## 10053 93 84 1 1
## 10054 94 84 1 1
## 10055 95 84 1 1
## 10056 96 84 1 1
## 10057 97 84 1 1
## 10058 98 84 1 1
## 10059 99 84 1 1
## 10060 100 84 1 1
## 10061 101 84 1 1
## 10062 102 84 1 1
## 10063 103 84 1 1
## 10064 104 84 1 1
## 10065 105 84 1 1
## 10066 106 84 1 1
## 10067 107 84 1 1
## 10068 108 84 1 1
## 10069 109 84 1 1
## 10070 110 84 1 1
## 10071 111 84 1 1
## 10072 112 84 1 1
## 10073 113 84 1 1
## 10074 114 84 1 1
## 10075 115 84 1 1
## 10076 116 84 1 1
## 10077 117 84 1 1
## 10078 118 84 1 1
## 10079 119 84 1 1
## 10080 120 84 1 1
## 10081 1 85 1 1
## 10082 2 85 1 1
## 10083 3 85 1 1
## 10084 4 85 1 1
## 10085 5 85 1 1
## 10086 6 85 1 1
## 10087 7 85 1 1
## 10088 8 85 1 1
## 10089 9 85 1 1
## 10090 10 85 1 1
## 10091 11 85 1 1
## 10092 12 85 1 1
## 10093 13 85 1 1
## 10094 14 85 1 1
## 10095 15 85 1 1
## 10096 16 85 1 1
## 10097 17 85 1 1
## 10098 18 85 1 1
## 10099 19 85 1 1
## 10100 20 85 1 1
## 10101 21 85 1 1
## 10102 22 85 1 1
## 10103 23 85 1 1
## 10104 24 85 1 1
## 10105 25 85 1 1
## 10106 26 85 1 1
## 10107 27 85 1 1
## 10108 28 85 1 1
## 10109 29 85 1 1
## 10110 30 85 1 1
## 10111 31 85 1 1
## 10112 32 85 1 1
## 10113 33 85 1 1
## 10114 34 85 1 1
## 10115 35 85 1 1
## 10116 36 85 1 1
## 10117 37 85 1 1
## 10118 38 85 1 1
## 10119 39 85 1 1
## 10120 40 85 1 1
## 10121 41 85 1 1
## 10122 42 85 1 1
## 10123 43 85 1 1
## 10124 44 85 1 1
## 10125 45 85 1 1
## 10126 46 85 1 1
## 10127 47 85 1 1
## 10128 48 85 1 1
## 10129 49 85 1 1
## 10130 50 85 1 1
## 10131 51 85 1 1
## 10132 52 85 1 1
## 10133 53 85 1 1
## 10134 54 85 1 1
## 10135 55 85 1 1
## 10136 56 85 1 1
## 10137 57 85 1 1
## 10138 58 85 1 1
## 10139 59 85 1 1
## 10140 60 85 1 1
## 10141 61 85 1 1
## 10142 62 85 1 1
## 10143 63 85 1 1
## 10144 64 85 1 1
## 10145 65 85 1 1
## 10146 66 85 1 1
## 10147 67 85 1 1
## 10148 68 85 1 1
## 10149 69 85 1 1
## 10150 70 85 1 1
## 10151 71 85 1 1
## 10152 72 85 1 1
## 10153 73 85 1 1
## 10154 74 85 1 1
## 10155 75 85 1 1
## 10156 76 85 1 1
## 10157 77 85 1 1
## 10158 78 85 1 1
## 10159 79 85 1 1
## 10160 80 85 1 1
## 10161 81 85 1 1
## 10162 82 85 1 1
## 10163 83 85 1 1
## 10164 84 85 1 1
## 10165 85 85 1 1
## 10166 86 85 1 1
## 10167 87 85 1 1
## 10168 88 85 1 1
## 10169 89 85 1 1
## 10170 90 85 1 1
## 10171 91 85 1 1
## 10172 92 85 1 1
## 10173 93 85 1 1
## 10174 94 85 1 1
## 10175 95 85 1 1
## 10176 96 85 1 1
## 10177 97 85 1 1
## 10178 98 85 1 1
## 10179 99 85 1 1
## 10180 100 85 1 1
## 10181 101 85 1 1
## 10182 102 85 1 1
## 10183 103 85 1 1
## 10184 104 85 1 1
## 10185 105 85 1 1
## 10186 106 85 1 1
## 10187 107 85 1 1
## 10188 108 85 1 1
## 10189 109 85 1 1
## 10190 110 85 1 1
## 10191 111 85 1 1
## 10192 112 85 1 1
## 10193 113 85 1 1
## 10194 114 85 1 1
## 10195 115 85 1 1
## 10196 116 85 1 1
## 10197 117 85 1 1
## 10198 118 85 1 1
## 10199 119 85 1 1
## 10200 120 85 1 1
## 10201 1 86 1 1
## 10202 2 86 1 1
## 10203 3 86 1 1
## 10204 4 86 1 1
## 10205 5 86 1 1
## 10206 6 86 1 1
## 10207 7 86 1 1
## 10208 8 86 1 1
## 10209 9 86 1 1
## 10210 10 86 1 1
## 10211 11 86 1 1
## 10212 12 86 1 1
## 10213 13 86 1 1
## 10214 14 86 1 1
## 10215 15 86 1 1
## 10216 16 86 1 1
## 10217 17 86 1 1
## 10218 18 86 1 1
## 10219 19 86 1 1
## 10220 20 86 1 1
## 10221 21 86 1 1
## 10222 22 86 1 1
## 10223 23 86 1 1
## 10224 24 86 1 1
## 10225 25 86 1 1
## 10226 26 86 1 1
## 10227 27 86 1 1
## 10228 28 86 1 1
## 10229 29 86 1 1
## 10230 30 86 1 1
## 10231 31 86 1 1
## 10232 32 86 1 1
## 10233 33 86 1 1
## 10234 34 86 1 1
## 10235 35 86 1 1
## 10236 36 86 1 1
## 10237 37 86 1 1
## 10238 38 86 1 1
## 10239 39 86 1 1
## 10240 40 86 1 1
## 10241 41 86 1 1
## 10242 42 86 1 1
## 10243 43 86 1 1
## 10244 44 86 1 1
## 10245 45 86 1 1
## 10246 46 86 1 1
## 10247 47 86 1 1
## 10248 48 86 1 1
## 10249 49 86 1 1
## 10250 50 86 1 1
## 10251 51 86 1 1
## 10252 52 86 1 1
## 10253 53 86 1 1
## 10254 54 86 1 1
## 10255 55 86 1 1
## 10256 56 86 1 1
## 10257 57 86 1 1
## 10258 58 86 1 1
## 10259 59 86 1 1
## 10260 60 86 1 1
## 10261 61 86 1 1
## 10262 62 86 1 1
## 10263 63 86 1 1
## 10264 64 86 1 1
## 10265 65 86 1 1
## 10266 66 86 1 1
## 10267 67 86 1 1
## 10268 68 86 1 1
## 10269 69 86 1 1
## 10270 70 86 1 1
## 10271 71 86 1 1
## 10272 72 86 1 1
## 10273 73 86 1 1
## 10274 74 86 1 1
## 10275 75 86 1 1
## 10276 76 86 1 1
## 10277 77 86 1 1
## 10278 78 86 1 1
## 10279 79 86 1 1
## 10280 80 86 1 1
## 10281 81 86 1 1
## 10282 82 86 1 1
## 10283 83 86 1 1
## 10284 84 86 1 1
## 10285 85 86 1 1
## 10286 86 86 1 1
## 10287 87 86 1 1
## 10288 88 86 1 1
## 10289 89 86 1 1
## 10290 90 86 1 1
## 10291 91 86 1 1
## 10292 92 86 1 1
## 10293 93 86 1 1
## 10294 94 86 1 1
## 10295 95 86 1 1
## 10296 96 86 1 1
## 10297 97 86 1 1
## 10298 98 86 1 1
## 10299 99 86 1 1
## 10300 100 86 1 1
## 10301 101 86 1 1
## 10302 102 86 1 1
## 10303 103 86 1 1
## 10304 104 86 1 1
## 10305 105 86 1 1
## 10306 106 86 1 1
## 10307 107 86 1 1
## 10308 108 86 1 1
## 10309 109 86 1 1
## 10310 110 86 1 1
## 10311 111 86 1 1
## 10312 112 86 1 1
## 10313 113 86 1 1
## 10314 114 86 1 1
## 10315 115 86 1 1
## 10316 116 86 1 1
## 10317 117 86 1 1
## 10318 118 86 1 1
## 10319 119 86 1 1
## 10320 120 86 1 1
## 10321 1 87 1 1
## 10322 2 87 1 1
## 10323 3 87 1 1
## 10324 4 87 1 1
## 10325 5 87 1 1
## 10326 6 87 1 1
## 10327 7 87 1 1
## 10328 8 87 1 1
## 10329 9 87 1 1
## 10330 10 87 1 1
## 10331 11 87 1 1
## 10332 12 87 1 1
## 10333 13 87 1 1
## 10334 14 87 1 1
## 10335 15 87 1 1
## 10336 16 87 1 1
## 10337 17 87 1 1
## 10338 18 87 1 1
## 10339 19 87 1 1
## 10340 20 87 1 1
## 10341 21 87 1 1
## 10342 22 87 1 1
## 10343 23 87 1 1
## 10344 24 87 1 1
## 10345 25 87 1 1
## 10346 26 87 1 1
## 10347 27 87 1 1
## 10348 28 87 1 1
## 10349 29 87 1 1
## 10350 30 87 1 1
## 10351 31 87 1 1
## 10352 32 87 1 1
## 10353 33 87 1 1
## 10354 34 87 1 1
## 10355 35 87 1 1
## 10356 36 87 1 1
## 10357 37 87 1 1
## 10358 38 87 1 1
## 10359 39 87 1 1
## 10360 40 87 1 1
## 10361 41 87 1 1
## 10362 42 87 1 1
## 10363 43 87 1 1
## 10364 44 87 1 1
## 10365 45 87 1 1
## 10366 46 87 1 1
## 10367 47 87 1 1
## 10368 48 87 1 1
## 10369 49 87 1 1
## 10370 50 87 1 1
## 10371 51 87 1 1
## 10372 52 87 1 1
## 10373 53 87 1 1
## 10374 54 87 1 1
## 10375 55 87 1 1
## 10376 56 87 1 1
## 10377 57 87 1 1
## 10378 58 87 1 1
## 10379 59 87 1 1
## 10380 60 87 1 1
## 10381 61 87 1 1
## 10382 62 87 1 1
## 10383 63 87 1 1
## 10384 64 87 1 1
## 10385 65 87 1 1
## 10386 66 87 1 1
## 10387 67 87 1 1
## 10388 68 87 1 1
## 10389 69 87 1 1
## 10390 70 87 1 1
## 10391 71 87 1 1
## 10392 72 87 1 1
## 10393 73 87 1 1
## 10394 74 87 1 1
## 10395 75 87 1 1
## 10396 76 87 1 1
## 10397 77 87 1 1
## 10398 78 87 1 1
## 10399 79 87 1 1
## 10400 80 87 1 1
## 10401 81 87 1 1
## 10402 82 87 1 1
## 10403 83 87 1 1
## 10404 84 87 1 1
## 10405 85 87 1 1
## 10406 86 87 1 1
## 10407 87 87 1 1
## 10408 88 87 1 1
## 10409 89 87 1 1
## 10410 90 87 1 1
## 10411 91 87 1 1
## 10412 92 87 1 1
## 10413 93 87 1 1
## 10414 94 87 1 1
## 10415 95 87 1 1
## 10416 96 87 1 1
## 10417 97 87 1 1
## 10418 98 87 1 1
## 10419 99 87 1 1
## 10420 100 87 1 1
## 10421 101 87 1 1
## 10422 102 87 1 1
## 10423 103 87 1 1
## 10424 104 87 1 1
## 10425 105 87 1 1
## 10426 106 87 1 1
## 10427 107 87 1 1
## 10428 108 87 1 1
## 10429 109 87 1 1
## 10430 110 87 1 1
## 10431 111 87 1 1
## 10432 112 87 1 1
## 10433 113 87 1 1
## 10434 114 87 1 1
## 10435 115 87 1 1
## 10436 116 87 1 1
## 10437 117 87 1 1
## 10438 118 87 1 1
## 10439 119 87 1 1
## 10440 120 87 1 1
## 10441 1 88 1 1
## 10442 2 88 1 1
## 10443 3 88 1 1
## 10444 4 88 1 1
## 10445 5 88 1 1
## 10446 6 88 1 1
## 10447 7 88 1 1
## 10448 8 88 1 1
## 10449 9 88 1 1
## 10450 10 88 1 1
## 10451 11 88 1 1
## 10452 12 88 1 1
## 10453 13 88 1 1
## 10454 14 88 1 1
## 10455 15 88 1 1
## 10456 16 88 1 1
## 10457 17 88 1 1
## 10458 18 88 1 1
## 10459 19 88 1 1
## 10460 20 88 1 1
## 10461 21 88 1 1
## 10462 22 88 1 1
## 10463 23 88 1 1
## 10464 24 88 1 1
## 10465 25 88 1 1
## 10466 26 88 1 1
## 10467 27 88 1 1
## 10468 28 88 1 1
## 10469 29 88 1 1
## 10470 30 88 1 1
## 10471 31 88 1 1
## 10472 32 88 1 1
## 10473 33 88 1 1
## 10474 34 88 1 1
## 10475 35 88 1 1
## 10476 36 88 1 1
## 10477 37 88 1 1
## 10478 38 88 1 1
## 10479 39 88 1 1
## 10480 40 88 1 1
## 10481 41 88 1 1
## 10482 42 88 1 1
## 10483 43 88 1 1
## 10484 44 88 1 1
## 10485 45 88 1 1
## 10486 46 88 1 1
## 10487 47 88 1 1
## 10488 48 88 1 1
## 10489 49 88 1 1
## 10490 50 88 1 1
## 10491 51 88 1 1
## 10492 52 88 1 1
## 10493 53 88 1 1
## 10494 54 88 1 1
## 10495 55 88 1 1
## 10496 56 88 1 1
## 10497 57 88 1 1
## 10498 58 88 1 1
## 10499 59 88 1 1
## 10500 60 88 1 1
## 10501 61 88 1 1
## 10502 62 88 1 1
## 10503 63 88 1 1
## 10504 64 88 1 1
## 10505 65 88 1 1
## 10506 66 88 1 1
## 10507 67 88 1 1
## 10508 68 88 1 1
## 10509 69 88 1 1
## 10510 70 88 1 1
## 10511 71 88 1 1
## 10512 72 88 1 1
## 10513 73 88 1 1
## 10514 74 88 1 1
## 10515 75 88 1 1
## 10516 76 88 1 1
## 10517 77 88 1 1
## 10518 78 88 1 1
## 10519 79 88 1 1
## 10520 80 88 1 1
## 10521 81 88 1 1
## 10522 82 88 1 1
## 10523 83 88 1 1
## 10524 84 88 1 1
## 10525 85 88 1 1
## 10526 86 88 1 1
## 10527 87 88 1 1
## 10528 88 88 1 1
## 10529 89 88 1 1
## 10530 90 88 1 1
## 10531 91 88 1 1
## 10532 92 88 1 1
## 10533 93 88 1 1
## 10534 94 88 1 1
## 10535 95 88 1 1
## 10536 96 88 1 1
## 10537 97 88 1 1
## 10538 98 88 1 1
## 10539 99 88 1 1
## 10540 100 88 1 1
## 10541 101 88 1 1
## 10542 102 88 1 1
## 10543 103 88 1 1
## 10544 104 88 1 1
## 10545 105 88 1 1
## 10546 106 88 1 1
## 10547 107 88 1 1
## 10548 108 88 1 1
## 10549 109 88 1 1
## 10550 110 88 1 1
## 10551 111 88 1 1
## 10552 112 88 1 1
## 10553 113 88 1 1
## 10554 114 88 1 1
## 10555 115 88 1 1
## 10556 116 88 1 1
## 10557 117 88 1 1
## 10558 118 88 1 1
## 10559 119 88 1 1
## 10560 120 88 1 1
## 10561 1 89 1 1
## 10562 2 89 1 1
## 10563 3 89 1 1
## 10564 4 89 1 1
## 10565 5 89 1 1
## 10566 6 89 1 1
## 10567 7 89 1 1
## 10568 8 89 1 1
## 10569 9 89 1 1
## 10570 10 89 1 1
## 10571 11 89 1 1
## 10572 12 89 1 1
## 10573 13 89 1 1
## 10574 14 89 1 1
## 10575 15 89 1 1
## 10576 16 89 1 1
## 10577 17 89 1 1
## 10578 18 89 1 1
## 10579 19 89 1 1
## 10580 20 89 1 1
## 10581 21 89 1 1
## 10582 22 89 1 1
## 10583 23 89 1 1
## 10584 24 89 1 1
## 10585 25 89 1 1
## 10586 26 89 1 1
## 10587 27 89 1 1
## 10588 28 89 1 1
## 10589 29 89 1 1
## 10590 30 89 1 1
## 10591 31 89 1 1
## 10592 32 89 1 1
## 10593 33 89 1 1
## 10594 34 89 1 1
## 10595 35 89 1 1
## 10596 36 89 1 1
## 10597 37 89 1 1
## 10598 38 89 1 1
## 10599 39 89 1 1
## 10600 40 89 1 1
## 10601 41 89 1 1
## 10602 42 89 1 1
## 10603 43 89 1 1
## 10604 44 89 1 1
## 10605 45 89 1 1
## 10606 46 89 1 1
## 10607 47 89 1 1
## 10608 48 89 1 1
## 10609 49 89 1 1
## 10610 50 89 1 1
## 10611 51 89 1 1
## 10612 52 89 1 1
## 10613 53 89 1 1
## 10614 54 89 1 1
## 10615 55 89 1 1
## 10616 56 89 1 1
## 10617 57 89 1 1
## 10618 58 89 1 1
## 10619 59 89 1 1
## 10620 60 89 1 1
## 10621 61 89 1 1
## 10622 62 89 1 1
## 10623 63 89 1 1
## 10624 64 89 1 1
## 10625 65 89 1 1
## 10626 66 89 1 1
## 10627 67 89 1 1
## 10628 68 89 1 1
## 10629 69 89 1 1
## 10630 70 89 1 1
## 10631 71 89 1 1
## 10632 72 89 1 1
## 10633 73 89 1 1
## 10634 74 89 1 1
## 10635 75 89 1 1
## 10636 76 89 1 1
## 10637 77 89 1 1
## 10638 78 89 1 1
## 10639 79 89 1 1
## 10640 80 89 1 1
## 10641 81 89 1 1
## 10642 82 89 1 1
## 10643 83 89 1 1
## 10644 84 89 1 1
## 10645 85 89 1 1
## 10646 86 89 1 1
## 10647 87 89 1 1
## 10648 88 89 1 1
## 10649 89 89 1 1
## 10650 90 89 1 1
## 10651 91 89 1 1
## 10652 92 89 1 1
## 10653 93 89 1 1
## 10654 94 89 1 1
## 10655 95 89 1 1
## 10656 96 89 1 1
## 10657 97 89 1 1
## 10658 98 89 1 1
## 10659 99 89 1 1
## 10660 100 89 1 1
## 10661 101 89 1 1
## 10662 102 89 1 1
## 10663 103 89 1 1
## 10664 104 89 1 1
## 10665 105 89 1 1
## 10666 106 89 1 1
## 10667 107 89 1 1
## 10668 108 89 1 1
## 10669 109 89 1 1
## 10670 110 89 1 1
## 10671 111 89 1 1
## 10672 112 89 1 1
## 10673 113 89 1 1
## 10674 114 89 1 1
## 10675 115 89 1 1
## 10676 116 89 1 1
## 10677 117 89 1 1
## 10678 118 89 1 1
## 10679 119 89 1 1
## 10680 120 89 1 1
## 10681 1 90 1 1
## 10682 2 90 1 1
## 10683 3 90 1 1
## 10684 4 90 1 1
## 10685 5 90 1 1
## 10686 6 90 1 1
## 10687 7 90 1 1
## 10688 8 90 1 1
## 10689 9 90 1 1
## 10690 10 90 1 1
## 10691 11 90 1 1
## 10692 12 90 1 1
## 10693 13 90 1 1
## 10694 14 90 1 1
## 10695 15 90 1 1
## 10696 16 90 1 1
## 10697 17 90 1 1
## 10698 18 90 1 1
## 10699 19 90 1 1
## 10700 20 90 1 1
## 10701 21 90 1 1
## 10702 22 90 1 1
## 10703 23 90 1 1
## 10704 24 90 1 1
## 10705 25 90 1 1
## 10706 26 90 1 1
## 10707 27 90 1 1
## 10708 28 90 1 1
## 10709 29 90 1 1
## 10710 30 90 1 1
## 10711 31 90 1 1
## 10712 32 90 1 1
## 10713 33 90 1 1
## 10714 34 90 1 1
## 10715 35 90 1 1
## 10716 36 90 1 1
## 10717 37 90 1 1
## 10718 38 90 1 1
## 10719 39 90 1 1
## 10720 40 90 1 1
## 10721 41 90 1 1
## 10722 42 90 1 1
## 10723 43 90 1 1
## 10724 44 90 1 1
## 10725 45 90 1 1
## 10726 46 90 1 1
## 10727 47 90 1 1
## 10728 48 90 1 1
## 10729 49 90 1 1
## 10730 50 90 1 1
## 10731 51 90 1 1
## 10732 52 90 1 1
## 10733 53 90 1 1
## 10734 54 90 1 1
## 10735 55 90 1 1
## 10736 56 90 1 1
## 10737 57 90 1 1
## 10738 58 90 1 1
## 10739 59 90 1 1
## 10740 60 90 1 1
## 10741 61 90 1 1
## 10742 62 90 1 1
## 10743 63 90 1 1
## 10744 64 90 1 1
## 10745 65 90 1 1
## 10746 66 90 1 1
## 10747 67 90 1 1
## 10748 68 90 1 1
## 10749 69 90 1 1
## 10750 70 90 1 1
## 10751 71 90 1 1
## 10752 72 90 1 1
## 10753 73 90 1 1
## 10754 74 90 1 1
## 10755 75 90 1 1
## 10756 76 90 1 1
## 10757 77 90 1 1
## 10758 78 90 1 1
## 10759 79 90 1 1
## 10760 80 90 1 1
## 10761 81 90 1 1
## 10762 82 90 1 1
## 10763 83 90 1 1
## 10764 84 90 1 1
## 10765 85 90 1 1
## 10766 86 90 1 1
## 10767 87 90 1 1
## 10768 88 90 1 1
## 10769 89 90 1 1
## 10770 90 90 1 1
## 10771 91 90 1 1
## 10772 92 90 1 1
## 10773 93 90 1 1
## 10774 94 90 1 1
## 10775 95 90 1 1
## 10776 96 90 1 1
## 10777 97 90 1 1
## 10778 98 90 1 1
## 10779 99 90 1 1
## 10780 100 90 1 1
## 10781 101 90 1 1
## 10782 102 90 1 1
## 10783 103 90 1 1
## 10784 104 90 1 1
## 10785 105 90 1 1
## 10786 106 90 1 1
## 10787 107 90 1 1
## 10788 108 90 1 1
## 10789 109 90 1 1
## 10790 110 90 1 1
## 10791 111 90 1 1
## 10792 112 90 1 1
## 10793 113 90 1 1
## 10794 114 90 1 1
## 10795 115 90 1 1
## 10796 116 90 1 1
## 10797 117 90 1 1
## 10798 118 90 1 1
## 10799 119 90 1 1
## 10800 120 90 1 1
## 10801 1 91 1 1
## 10802 2 91 1 1
## 10803 3 91 1 1
## 10804 4 91 1 1
## 10805 5 91 1 1
## 10806 6 91 1 1
## 10807 7 91 1 1
## 10808 8 91 1 1
## 10809 9 91 1 1
## 10810 10 91 1 1
## 10811 11 91 1 1
## 10812 12 91 1 1
## 10813 13 91 1 1
## 10814 14 91 1 1
## 10815 15 91 1 1
## 10816 16 91 1 1
## 10817 17 91 1 1
## 10818 18 91 1 1
## 10819 19 91 1 1
## 10820 20 91 1 1
## 10821 21 91 1 1
## 10822 22 91 1 1
## 10823 23 91 1 1
## 10824 24 91 1 1
## 10825 25 91 1 1
## 10826 26 91 1 1
## 10827 27 91 1 1
## 10828 28 91 1 1
## 10829 29 91 1 1
## 10830 30 91 1 1
## 10831 31 91 1 1
## 10832 32 91 1 1
## 10833 33 91 1 1
## 10834 34 91 1 1
## 10835 35 91 1 1
## 10836 36 91 1 1
## 10837 37 91 1 1
## 10838 38 91 1 1
## 10839 39 91 1 1
## 10840 40 91 1 1
## 10841 41 91 1 1
## 10842 42 91 1 1
## 10843 43 91 1 1
## 10844 44 91 1 1
## 10845 45 91 1 1
## 10846 46 91 1 1
## 10847 47 91 1 1
## 10848 48 91 1 1
## 10849 49 91 1 1
## 10850 50 91 1 1
## 10851 51 91 1 1
## 10852 52 91 1 1
## 10853 53 91 1 1
## 10854 54 91 1 1
## 10855 55 91 1 1
## 10856 56 91 1 1
## 10857 57 91 1 1
## 10858 58 91 1 1
## 10859 59 91 1 1
## 10860 60 91 1 1
## 10861 61 91 1 1
## 10862 62 91 1 1
## 10863 63 91 1 1
## 10864 64 91 1 1
## 10865 65 91 1 1
## 10866 66 91 1 1
## 10867 67 91 1 1
## 10868 68 91 1 1
## 10869 69 91 1 1
## 10870 70 91 1 1
## 10871 71 91 1 1
## 10872 72 91 1 1
## 10873 73 91 1 1
## 10874 74 91 1 1
## 10875 75 91 1 1
## 10876 76 91 1 1
## 10877 77 91 1 1
## 10878 78 91 1 1
## 10879 79 91 1 1
## 10880 80 91 1 1
## 10881 81 91 1 1
## 10882 82 91 1 1
## 10883 83 91 1 1
## 10884 84 91 1 1
## 10885 85 91 1 1
## 10886 86 91 1 1
## 10887 87 91 1 1
## 10888 88 91 1 1
## 10889 89 91 1 1
## 10890 90 91 1 1
## 10891 91 91 1 1
## 10892 92 91 1 1
## 10893 93 91 1 1
## 10894 94 91 1 1
## 10895 95 91 1 1
## 10896 96 91 1 1
## 10897 97 91 1 1
## 10898 98 91 1 1
## 10899 99 91 1 1
## 10900 100 91 1 1
## 10901 101 91 1 1
## 10902 102 91 1 1
## 10903 103 91 1 1
## 10904 104 91 1 1
## 10905 105 91 1 1
## 10906 106 91 1 1
## 10907 107 91 1 1
## 10908 108 91 1 1
## 10909 109 91 1 1
## 10910 110 91 1 1
## 10911 111 91 1 1
## 10912 112 91 1 1
## 10913 113 91 1 1
## 10914 114 91 1 1
## 10915 115 91 1 1
## 10916 116 91 1 1
## 10917 117 91 1 1
## 10918 118 91 1 1
## 10919 119 91 1 1
## 10920 120 91 1 1
## 10921 1 92 1 1
## 10922 2 92 1 1
## 10923 3 92 1 1
## 10924 4 92 1 1
## 10925 5 92 1 1
## 10926 6 92 1 1
## 10927 7 92 1 1
## 10928 8 92 1 1
## 10929 9 92 1 1
## 10930 10 92 1 1
## 10931 11 92 1 1
## 10932 12 92 1 1
## 10933 13 92 1 1
## 10934 14 92 1 1
## 10935 15 92 1 1
## 10936 16 92 1 1
## 10937 17 92 1 1
## 10938 18 92 1 1
## 10939 19 92 1 1
## 10940 20 92 1 1
## 10941 21 92 1 1
## 10942 22 92 1 1
## 10943 23 92 1 1
## 10944 24 92 1 1
## 10945 25 92 1 1
## 10946 26 92 1 1
## 10947 27 92 1 1
## 10948 28 92 1 1
## 10949 29 92 1 1
## 10950 30 92 1 1
## 10951 31 92 1 1
## 10952 32 92 1 1
## 10953 33 92 1 1
## 10954 34 92 1 1
## 10955 35 92 1 1
## 10956 36 92 1 1
## 10957 37 92 1 1
## 10958 38 92 1 1
## 10959 39 92 1 1
## 10960 40 92 1 1
## 10961 41 92 1 1
## 10962 42 92 1 1
## 10963 43 92 1 1
## 10964 44 92 1 1
## 10965 45 92 1 1
## 10966 46 92 1 1
## 10967 47 92 1 1
## 10968 48 92 1 1
## 10969 49 92 1 1
## 10970 50 92 1 1
## 10971 51 92 1 1
## 10972 52 92 1 1
## 10973 53 92 1 1
## 10974 54 92 1 1
## 10975 55 92 1 1
## 10976 56 92 1 1
## 10977 57 92 1 1
## 10978 58 92 1 1
## 10979 59 92 1 1
## 10980 60 92 1 1
## 10981 61 92 1 1
## 10982 62 92 1 1
## 10983 63 92 1 1
## 10984 64 92 1 1
## 10985 65 92 1 1
## 10986 66 92 1 1
## 10987 67 92 1 1
## 10988 68 92 1 1
## 10989 69 92 1 1
## 10990 70 92 1 1
## 10991 71 92 1 1
## 10992 72 92 1 1
## 10993 73 92 1 1
## 10994 74 92 1 1
## 10995 75 92 1 1
## 10996 76 92 1 1
## 10997 77 92 1 1
## 10998 78 92 1 1
## 10999 79 92 1 1
## 11000 80 92 1 1
## 11001 81 92 1 1
## 11002 82 92 1 1
## 11003 83 92 1 1
## 11004 84 92 1 1
## 11005 85 92 1 1
## 11006 86 92 1 1
## 11007 87 92 1 1
## 11008 88 92 1 1
## 11009 89 92 1 1
## 11010 90 92 1 1
## 11011 91 92 1 1
## 11012 92 92 1 1
## 11013 93 92 1 1
## 11014 94 92 1 1
## 11015 95 92 1 1
## 11016 96 92 1 1
## 11017 97 92 1 1
## 11018 98 92 1 1
## 11019 99 92 1 1
## 11020 100 92 1 1
## 11021 101 92 1 1
## 11022 102 92 1 1
## 11023 103 92 1 1
## 11024 104 92 1 1
## 11025 105 92 1 1
## 11026 106 92 1 1
## 11027 107 92 1 1
## 11028 108 92 1 1
## 11029 109 92 1 1
## 11030 110 92 1 1
## 11031 111 92 1 1
## 11032 112 92 1 1
## 11033 113 92 1 1
## 11034 114 92 1 1
## 11035 115 92 1 1
## 11036 116 92 1 1
## 11037 117 92 1 1
## 11038 118 92 1 1
## 11039 119 92 1 1
## 11040 120 92 1 1
## 11041 1 93 1 1
## 11042 2 93 1 1
## 11043 3 93 1 1
## 11044 4 93 1 1
## 11045 5 93 1 1
## 11046 6 93 1 1
## 11047 7 93 1 1
## 11048 8 93 1 1
## 11049 9 93 1 1
## 11050 10 93 1 1
## 11051 11 93 1 1
## 11052 12 93 1 1
## 11053 13 93 1 1
## 11054 14 93 1 1
## 11055 15 93 1 1
## 11056 16 93 1 1
## 11057 17 93 1 1
## 11058 18 93 1 1
## 11059 19 93 1 1
## 11060 20 93 1 1
## 11061 21 93 1 1
## 11062 22 93 1 1
## 11063 23 93 1 1
## 11064 24 93 1 1
## 11065 25 93 1 1
## 11066 26 93 1 1
## 11067 27 93 1 1
## 11068 28 93 1 1
## 11069 29 93 1 1
## 11070 30 93 1 1
## 11071 31 93 1 1
## 11072 32 93 1 1
## 11073 33 93 1 1
## 11074 34 93 1 1
## 11075 35 93 1 1
## 11076 36 93 1 1
## 11077 37 93 1 1
## 11078 38 93 1 1
## 11079 39 93 1 1
## 11080 40 93 1 1
## 11081 41 93 1 1
## 11082 42 93 1 1
## 11083 43 93 1 1
## 11084 44 93 1 1
## 11085 45 93 1 1
## 11086 46 93 1 1
## 11087 47 93 1 1
## 11088 48 93 1 1
## 11089 49 93 1 1
## 11090 50 93 1 1
## 11091 51 93 1 1
## 11092 52 93 1 1
## 11093 53 93 1 1
## 11094 54 93 1 1
## 11095 55 93 1 1
## 11096 56 93 1 1
## 11097 57 93 1 1
## 11098 58 93 1 1
## 11099 59 93 1 1
## 11100 60 93 1 1
## 11101 61 93 1 1
## 11102 62 93 1 1
## 11103 63 93 1 1
## 11104 64 93 1 1
## 11105 65 93 1 1
## 11106 66 93 1 1
## 11107 67 93 1 1
## 11108 68 93 1 1
## 11109 69 93 1 1
## 11110 70 93 1 1
## 11111 71 93 1 1
## 11112 72 93 1 1
## 11113 73 93 1 1
## 11114 74 93 1 1
## 11115 75 93 1 1
## 11116 76 93 1 1
## 11117 77 93 1 1
## 11118 78 93 1 1
## 11119 79 93 1 1
## 11120 80 93 1 1
## 11121 81 93 1 1
## 11122 82 93 1 1
## 11123 83 93 1 1
## 11124 84 93 1 1
## 11125 85 93 1 1
## 11126 86 93 1 1
## 11127 87 93 1 1
## 11128 88 93 1 1
## 11129 89 93 1 1
## 11130 90 93 1 1
## 11131 91 93 1 1
## 11132 92 93 1 1
## 11133 93 93 1 1
## 11134 94 93 1 1
## 11135 95 93 1 1
## 11136 96 93 1 1
## 11137 97 93 1 1
## 11138 98 93 1 1
## 11139 99 93 1 1
## 11140 100 93 1 1
## 11141 101 93 1 1
## 11142 102 93 1 1
## 11143 103 93 1 1
## 11144 104 93 1 1
## 11145 105 93 1 1
## 11146 106 93 1 1
## 11147 107 93 1 1
## 11148 108 93 1 1
## 11149 109 93 1 1
## 11150 110 93 1 1
## 11151 111 93 1 1
## 11152 112 93 1 1
## 11153 113 93 1 1
## 11154 114 93 1 1
## 11155 115 93 1 1
## 11156 116 93 1 1
## 11157 117 93 1 1
## 11158 118 93 1 1
## 11159 119 93 1 1
## 11160 120 93 1 1
## 11161 1 94 1 1
## 11162 2 94 1 1
## 11163 3 94 1 1
## 11164 4 94 1 1
## 11165 5 94 1 1
## 11166 6 94 1 1
## 11167 7 94 1 1
## 11168 8 94 1 1
## 11169 9 94 1 1
## 11170 10 94 1 1
## 11171 11 94 1 1
## 11172 12 94 1 1
## 11173 13 94 1 1
## 11174 14 94 1 1
## 11175 15 94 1 1
## 11176 16 94 1 1
## 11177 17 94 1 1
## 11178 18 94 1 1
## 11179 19 94 1 1
## 11180 20 94 1 1
## 11181 21 94 1 1
## 11182 22 94 1 1
## 11183 23 94 1 1
## 11184 24 94 1 1
## 11185 25 94 1 1
## 11186 26 94 1 1
## 11187 27 94 1 1
## 11188 28 94 1 1
## 11189 29 94 1 1
## 11190 30 94 1 1
## 11191 31 94 1 1
## 11192 32 94 1 1
## 11193 33 94 1 1
## 11194 34 94 1 1
## 11195 35 94 1 1
## 11196 36 94 1 1
## 11197 37 94 1 1
## 11198 38 94 1 1
## 11199 39 94 1 1
## 11200 40 94 1 1
## 11201 41 94 1 1
## 11202 42 94 1 1
## 11203 43 94 1 1
## 11204 44 94 1 1
## 11205 45 94 1 1
## 11206 46 94 1 1
## 11207 47 94 1 1
## 11208 48 94 1 1
## 11209 49 94 1 1
## 11210 50 94 1 1
## 11211 51 94 1 1
## 11212 52 94 1 1
## 11213 53 94 1 1
## 11214 54 94 1 1
## 11215 55 94 1 1
## 11216 56 94 1 1
## 11217 57 94 1 1
## 11218 58 94 1 1
## 11219 59 94 1 1
## 11220 60 94 1 1
## 11221 61 94 1 1
## 11222 62 94 1 1
## 11223 63 94 1 1
## 11224 64 94 1 1
## 11225 65 94 1 1
## 11226 66 94 1 1
## 11227 67 94 1 1
## 11228 68 94 1 1
## 11229 69 94 1 1
## 11230 70 94 1 1
## 11231 71 94 1 1
## 11232 72 94 1 1
## 11233 73 94 1 1
## 11234 74 94 1 1
## 11235 75 94 1 1
## 11236 76 94 1 1
## 11237 77 94 1 1
## 11238 78 94 1 1
## 11239 79 94 1 1
## 11240 80 94 1 1
## 11241 81 94 1 1
## 11242 82 94 1 1
## 11243 83 94 1 1
## 11244 84 94 1 1
## 11245 85 94 1 1
## 11246 86 94 1 1
## 11247 87 94 1 1
## 11248 88 94 1 1
## 11249 89 94 1 1
## 11250 90 94 1 1
## 11251 91 94 1 1
## 11252 92 94 1 1
## 11253 93 94 1 1
## 11254 94 94 1 1
## 11255 95 94 1 1
## 11256 96 94 1 1
## 11257 97 94 1 1
## 11258 98 94 1 1
## 11259 99 94 1 1
## 11260 100 94 1 1
## 11261 101 94 1 1
## 11262 102 94 1 1
## 11263 103 94 1 1
## 11264 104 94 1 1
## 11265 105 94 1 1
## 11266 106 94 1 1
## 11267 107 94 1 1
## 11268 108 94 1 1
## 11269 109 94 1 1
## 11270 110 94 1 1
## 11271 111 94 1 1
## 11272 112 94 1 1
## 11273 113 94 1 1
## 11274 114 94 1 1
## 11275 115 94 1 1
## 11276 116 94 1 1
## 11277 117 94 1 1
## 11278 118 94 1 1
## 11279 119 94 1 1
## 11280 120 94 1 1
## 11281 1 95 1 1
## 11282 2 95 1 1
## 11283 3 95 1 1
## 11284 4 95 1 1
## 11285 5 95 1 1
## 11286 6 95 1 1
## 11287 7 95 1 1
## 11288 8 95 1 1
## 11289 9 95 1 1
## 11290 10 95 1 1
## 11291 11 95 1 1
## 11292 12 95 1 1
## 11293 13 95 1 1
## 11294 14 95 1 1
## 11295 15 95 1 1
## 11296 16 95 1 1
## 11297 17 95 1 1
## 11298 18 95 1 1
## 11299 19 95 1 1
## 11300 20 95 1 1
## 11301 21 95 1 1
## 11302 22 95 1 1
## 11303 23 95 1 1
## 11304 24 95 1 1
## 11305 25 95 1 1
## 11306 26 95 1 1
## 11307 27 95 1 1
## 11308 28 95 1 1
## 11309 29 95 1 1
## 11310 30 95 1 1
## 11311 31 95 1 1
## 11312 32 95 1 1
## 11313 33 95 1 1
## 11314 34 95 1 1
## 11315 35 95 1 1
## 11316 36 95 1 1
## 11317 37 95 1 1
## 11318 38 95 1 1
## 11319 39 95 1 1
## 11320 40 95 1 1
## 11321 41 95 1 1
## 11322 42 95 1 1
## 11323 43 95 1 1
## 11324 44 95 1 1
## 11325 45 95 1 1
## 11326 46 95 1 1
## 11327 47 95 1 1
## 11328 48 95 1 1
## 11329 49 95 1 1
## 11330 50 95 1 1
## 11331 51 95 1 1
## 11332 52 95 1 1
## 11333 53 95 1 1
## 11334 54 95 1 1
## 11335 55 95 1 1
## 11336 56 95 1 1
## 11337 57 95 1 1
## 11338 58 95 1 1
## 11339 59 95 1 1
## 11340 60 95 1 1
## 11341 61 95 1 1
## 11342 62 95 1 1
## 11343 63 95 1 1
## 11344 64 95 1 1
## 11345 65 95 1 1
## 11346 66 95 1 1
## 11347 67 95 1 1
## 11348 68 95 1 1
## 11349 69 95 1 1
## 11350 70 95 1 1
## 11351 71 95 1 1
## 11352 72 95 1 1
## 11353 73 95 1 1
## 11354 74 95 1 1
## 11355 75 95 1 1
## 11356 76 95 1 1
## 11357 77 95 1 1
## 11358 78 95 1 1
## 11359 79 95 1 1
## 11360 80 95 1 1
## 11361 81 95 1 1
## 11362 82 95 1 1
## 11363 83 95 1 1
## 11364 84 95 1 1
## 11365 85 95 1 1
## 11366 86 95 1 1
## 11367 87 95 1 1
## 11368 88 95 1 1
## 11369 89 95 1 1
## 11370 90 95 1 1
## 11371 91 95 1 1
## 11372 92 95 1 1
## 11373 93 95 1 1
## 11374 94 95 1 1
## 11375 95 95 1 1
## 11376 96 95 1 1
## 11377 97 95 1 1
## 11378 98 95 1 1
## 11379 99 95 1 1
## 11380 100 95 1 1
## 11381 101 95 1 1
## 11382 102 95 1 1
## 11383 103 95 1 1
## 11384 104 95 1 1
## 11385 105 95 1 1
## 11386 106 95 1 1
## 11387 107 95 1 1
## 11388 108 95 1 1
## 11389 109 95 1 1
## 11390 110 95 1 1
## 11391 111 95 1 1
## 11392 112 95 1 1
## 11393 113 95 1 1
## 11394 114 95 1 1
## 11395 115 95 1 1
## 11396 116 95 1 1
## 11397 117 95 1 1
## 11398 118 95 1 1
## 11399 119 95 1 1
## 11400 120 95 1 1
## 11401 1 96 1 1
## 11402 2 96 1 1
## 11403 3 96 1 1
## 11404 4 96 1 1
## 11405 5 96 1 1
## 11406 6 96 1 1
## 11407 7 96 1 1
## 11408 8 96 1 1
## 11409 9 96 1 1
## 11410 10 96 1 1
## 11411 11 96 1 1
## 11412 12 96 1 1
## 11413 13 96 1 1
## 11414 14 96 1 1
## 11415 15 96 1 1
## 11416 16 96 1 1
## 11417 17 96 1 1
## 11418 18 96 1 1
## 11419 19 96 1 1
## 11420 20 96 1 1
## 11421 21 96 1 1
## 11422 22 96 1 1
## 11423 23 96 1 1
## 11424 24 96 1 1
## 11425 25 96 1 1
## 11426 26 96 1 1
## 11427 27 96 1 1
## 11428 28 96 1 1
## 11429 29 96 1 1
## 11430 30 96 1 1
## 11431 31 96 1 1
## 11432 32 96 1 1
## 11433 33 96 1 1
## 11434 34 96 1 1
## 11435 35 96 1 1
## 11436 36 96 1 1
## 11437 37 96 1 1
## 11438 38 96 1 1
## 11439 39 96 1 1
## 11440 40 96 1 1
## 11441 41 96 1 1
## 11442 42 96 1 1
## 11443 43 96 1 1
## 11444 44 96 1 1
## 11445 45 96 1 1
## 11446 46 96 1 1
## 11447 47 96 1 1
## 11448 48 96 1 1
## 11449 49 96 1 1
## 11450 50 96 1 1
## 11451 51 96 1 1
## 11452 52 96 1 1
## 11453 53 96 1 1
## 11454 54 96 1 1
## 11455 55 96 1 1
## 11456 56 96 1 1
## 11457 57 96 1 1
## 11458 58 96 1 1
## 11459 59 96 1 1
## 11460 60 96 1 1
## 11461 61 96 1 1
## 11462 62 96 1 1
## 11463 63 96 1 1
## 11464 64 96 1 1
## 11465 65 96 1 1
## 11466 66 96 1 1
## 11467 67 96 1 1
## 11468 68 96 1 1
## 11469 69 96 1 1
## 11470 70 96 1 1
## 11471 71 96 1 1
## 11472 72 96 1 1
## 11473 73 96 1 1
## 11474 74 96 1 1
## 11475 75 96 1 1
## 11476 76 96 1 1
## 11477 77 96 1 1
## 11478 78 96 1 1
## 11479 79 96 1 1
## 11480 80 96 1 1
## 11481 81 96 1 1
## 11482 82 96 1 1
## 11483 83 96 1 1
## 11484 84 96 1 1
## 11485 85 96 1 1
## 11486 86 96 1 1
## 11487 87 96 1 1
## 11488 88 96 1 1
## 11489 89 96 1 1
## 11490 90 96 1 1
## 11491 91 96 1 1
## 11492 92 96 1 1
## 11493 93 96 1 1
## 11494 94 96 1 1
## 11495 95 96 1 1
## 11496 96 96 1 1
## 11497 97 96 1 1
## 11498 98 96 1 1
## 11499 99 96 1 1
## 11500 100 96 1 1
## 11501 101 96 1 1
## 11502 102 96 1 1
## 11503 103 96 1 1
## 11504 104 96 1 1
## 11505 105 96 1 1
## 11506 106 96 1 1
## 11507 107 96 1 1
## 11508 108 96 1 1
## 11509 109 96 1 1
## 11510 110 96 1 1
## 11511 111 96 1 1
## 11512 112 96 1 1
## 11513 113 96 1 1
## 11514 114 96 1 1
## 11515 115 96 1 1
## 11516 116 96 1 1
## 11517 117 96 1 1
## 11518 118 96 1 1
## 11519 119 96 1 1
## 11520 120 96 1 1
## 11521 1 97 1 1
## 11522 2 97 1 1
## 11523 3 97 1 1
## 11524 4 97 1 1
## 11525 5 97 1 1
## 11526 6 97 1 1
## 11527 7 97 1 1
## 11528 8 97 1 1
## 11529 9 97 1 1
## 11530 10 97 1 1
## 11531 11 97 1 1
## 11532 12 97 1 1
## 11533 13 97 1 1
## 11534 14 97 1 1
## 11535 15 97 1 1
## 11536 16 97 1 1
## 11537 17 97 1 1
## 11538 18 97 1 1
## 11539 19 97 1 1
## 11540 20 97 1 1
## 11541 21 97 1 1
## 11542 22 97 1 1
## 11543 23 97 1 1
## 11544 24 97 1 1
## 11545 25 97 1 1
## 11546 26 97 1 1
## 11547 27 97 1 1
## 11548 28 97 1 1
## 11549 29 97 1 1
## 11550 30 97 1 1
## 11551 31 97 1 1
## 11552 32 97 1 1
## 11553 33 97 1 1
## 11554 34 97 1 1
## 11555 35 97 1 1
## 11556 36 97 1 1
## 11557 37 97 1 1
## 11558 38 97 1 1
## 11559 39 97 1 1
## 11560 40 97 1 1
## 11561 41 97 1 1
## 11562 42 97 1 1
## 11563 43 97 1 1
## 11564 44 97 1 1
## 11565 45 97 1 1
## 11566 46 97 1 1
## 11567 47 97 1 1
## 11568 48 97 1 1
## 11569 49 97 1 1
## 11570 50 97 1 1
## 11571 51 97 1 1
## 11572 52 97 1 1
## 11573 53 97 1 1
## 11574 54 97 1 1
## 11575 55 97 1 1
## 11576 56 97 1 1
## 11577 57 97 1 1
## 11578 58 97 1 1
## 11579 59 97 1 1
## 11580 60 97 1 1
## 11581 61 97 1 1
## 11582 62 97 1 1
## 11583 63 97 1 1
## 11584 64 97 1 1
## 11585 65 97 1 1
## 11586 66 97 1 1
## 11587 67 97 1 1
## 11588 68 97 1 1
## 11589 69 97 1 1
## 11590 70 97 1 1
## 11591 71 97 1 1
## 11592 72 97 1 1
## 11593 73 97 1 1
## 11594 74 97 1 1
## 11595 75 97 1 1
## 11596 76 97 1 1
## 11597 77 97 1 1
## 11598 78 97 1 1
## 11599 79 97 1 1
## 11600 80 97 1 1
## 11601 81 97 1 1
## 11602 82 97 1 1
## 11603 83 97 1 1
## 11604 84 97 1 1
## 11605 85 97 1 1
## 11606 86 97 1 1
## 11607 87 97 1 1
## 11608 88 97 1 1
## 11609 89 97 1 1
## 11610 90 97 1 1
## 11611 91 97 1 1
## 11612 92 97 1 1
## 11613 93 97 1 1
## 11614 94 97 1 1
## 11615 95 97 1 1
## 11616 96 97 1 1
## 11617 97 97 1 1
## 11618 98 97 1 1
## 11619 99 97 1 1
## 11620 100 97 1 1
## 11621 101 97 1 1
## 11622 102 97 1 1
## 11623 103 97 1 1
## 11624 104 97 1 1
## 11625 105 97 1 1
## 11626 106 97 1 1
## 11627 107 97 1 1
## 11628 108 97 1 1
## 11629 109 97 1 1
## 11630 110 97 1 1
## 11631 111 97 1 1
## 11632 112 97 1 1
## 11633 113 97 1 1
## 11634 114 97 1 1
## 11635 115 97 1 1
## 11636 116 97 1 1
## 11637 117 97 1 1
## 11638 118 97 1 1
## 11639 119 97 1 1
## 11640 120 97 1 1
## 11641 1 98 1 1
## 11642 2 98 1 1
## 11643 3 98 1 1
## 11644 4 98 1 1
## 11645 5 98 1 1
## 11646 6 98 1 1
## 11647 7 98 1 1
## 11648 8 98 1 1
## 11649 9 98 1 1
## 11650 10 98 1 1
## 11651 11 98 1 1
## 11652 12 98 1 1
## 11653 13 98 1 1
## 11654 14 98 1 1
## 11655 15 98 1 1
## 11656 16 98 1 1
## 11657 17 98 1 1
## 11658 18 98 1 1
## 11659 19 98 1 1
## 11660 20 98 1 1
## 11661 21 98 1 1
## 11662 22 98 1 1
## 11663 23 98 1 1
## 11664 24 98 1 1
## 11665 25 98 1 1
## 11666 26 98 1 1
## 11667 27 98 1 1
## 11668 28 98 1 1
## 11669 29 98 1 1
## 11670 30 98 1 1
## 11671 31 98 1 1
## 11672 32 98 1 1
## 11673 33 98 1 1
## 11674 34 98 1 1
## 11675 35 98 1 1
## 11676 36 98 1 1
## 11677 37 98 1 1
## 11678 38 98 1 1
## 11679 39 98 1 1
## 11680 40 98 1 1
## 11681 41 98 1 1
## 11682 42 98 1 1
## 11683 43 98 1 1
## 11684 44 98 1 1
## 11685 45 98 1 1
## 11686 46 98 1 1
## 11687 47 98 1 1
## 11688 48 98 1 1
## 11689 49 98 1 1
## 11690 50 98 1 1
## 11691 51 98 1 1
## 11692 52 98 1 1
## 11693 53 98 1 1
## 11694 54 98 1 1
## 11695 55 98 1 1
## 11696 56 98 1 1
## 11697 57 98 1 1
## 11698 58 98 1 1
## 11699 59 98 1 1
## 11700 60 98 1 1
## 11701 61 98 1 1
## 11702 62 98 1 1
## 11703 63 98 1 1
## 11704 64 98 1 1
## 11705 65 98 1 1
## 11706 66 98 1 1
## 11707 67 98 1 1
## 11708 68 98 1 1
## 11709 69 98 1 1
## 11710 70 98 1 1
## 11711 71 98 1 1
## 11712 72 98 1 1
## 11713 73 98 1 1
## 11714 74 98 1 1
## 11715 75 98 1 1
## 11716 76 98 1 1
## 11717 77 98 1 1
## 11718 78 98 1 1
## 11719 79 98 1 1
## 11720 80 98 1 1
## 11721 81 98 1 1
## 11722 82 98 1 1
## 11723 83 98 1 1
## 11724 84 98 1 1
## 11725 85 98 1 1
## 11726 86 98 1 1
## 11727 87 98 1 1
## 11728 88 98 1 1
## 11729 89 98 1 1
## 11730 90 98 1 1
## 11731 91 98 1 1
## 11732 92 98 1 1
## 11733 93 98 1 1
## 11734 94 98 1 1
## 11735 95 98 1 1
## 11736 96 98 1 1
## 11737 97 98 1 1
## 11738 98 98 1 1
## 11739 99 98 1 1
## 11740 100 98 1 1
## 11741 101 98 1 1
## 11742 102 98 1 1
## 11743 103 98 1 1
## 11744 104 98 1 1
## 11745 105 98 1 1
## 11746 106 98 1 1
## 11747 107 98 1 1
## 11748 108 98 1 1
## 11749 109 98 1 1
## 11750 110 98 1 1
## 11751 111 98 1 1
## 11752 112 98 1 1
## 11753 113 98 1 1
## 11754 114 98 1 1
## 11755 115 98 1 1
## 11756 116 98 1 1
## 11757 117 98 1 1
## 11758 118 98 1 1
## 11759 119 98 1 1
## 11760 120 98 1 1
## 11761 1 99 1 1
## 11762 2 99 1 1
## 11763 3 99 1 1
## 11764 4 99 1 1
## 11765 5 99 1 1
## 11766 6 99 1 1
## 11767 7 99 1 1
## 11768 8 99 1 1
## 11769 9 99 1 1
## 11770 10 99 1 1
## 11771 11 99 1 1
## 11772 12 99 1 1
## 11773 13 99 1 1
## 11774 14 99 1 1
## 11775 15 99 1 1
## 11776 16 99 1 1
## 11777 17 99 1 1
## 11778 18 99 1 1
## 11779 19 99 1 1
## 11780 20 99 1 1
## 11781 21 99 1 1
## 11782 22 99 1 1
## 11783 23 99 1 1
## 11784 24 99 1 1
## 11785 25 99 1 1
## 11786 26 99 1 1
## 11787 27 99 1 1
## 11788 28 99 1 1
## 11789 29 99 1 1
## 11790 30 99 1 1
## 11791 31 99 1 1
## 11792 32 99 1 1
## 11793 33 99 1 1
## 11794 34 99 1 1
## 11795 35 99 1 1
## 11796 36 99 1 1
## 11797 37 99 1 1
## 11798 38 99 1 1
## 11799 39 99 1 1
## 11800 40 99 1 1
## 11801 41 99 1 1
## 11802 42 99 1 1
## 11803 43 99 1 1
## 11804 44 99 1 1
## 11805 45 99 1 1
## 11806 46 99 1 1
## 11807 47 99 1 1
## 11808 48 99 1 1
## 11809 49 99 1 1
## 11810 50 99 1 1
## 11811 51 99 1 1
## 11812 52 99 1 1
## 11813 53 99 1 1
## 11814 54 99 1 1
## 11815 55 99 1 1
## 11816 56 99 1 1
## 11817 57 99 1 1
## 11818 58 99 1 1
## 11819 59 99 1 1
## 11820 60 99 1 1
## 11821 61 99 1 1
## 11822 62 99 1 1
## 11823 63 99 1 1
## 11824 64 99 1 1
## 11825 65 99 1 1
## 11826 66 99 1 1
## 11827 67 99 1 1
## 11828 68 99 1 1
## 11829 69 99 1 1
## 11830 70 99 1 1
## 11831 71 99 1 1
## 11832 72 99 1 1
## 11833 73 99 1 1
## 11834 74 99 1 1
## 11835 75 99 1 1
## 11836 76 99 1 1
## 11837 77 99 1 1
## 11838 78 99 1 1
## 11839 79 99 1 1
## 11840 80 99 1 1
## 11841 81 99 1 1
## 11842 82 99 1 1
## 11843 83 99 1 1
## 11844 84 99 1 1
## 11845 85 99 1 1
## 11846 86 99 1 1
## 11847 87 99 1 1
## 11848 88 99 1 1
## 11849 89 99 1 1
## 11850 90 99 1 1
## 11851 91 99 1 1
## 11852 92 99 1 1
## 11853 93 99 1 1
## 11854 94 99 1 1
## 11855 95 99 1 1
## 11856 96 99 1 1
## 11857 97 99 1 1
## 11858 98 99 1 1
## 11859 99 99 1 1
## 11860 100 99 1 1
## 11861 101 99 1 1
## 11862 102 99 1 1
## 11863 103 99 1 1
## 11864 104 99 1 1
## 11865 105 99 1 1
## 11866 106 99 1 1
## 11867 107 99 1 1
## 11868 108 99 1 1
## 11869 109 99 1 1
## 11870 110 99 1 1
## 11871 111 99 1 1
## 11872 112 99 1 1
## 11873 113 99 1 1
## 11874 114 99 1 1
## 11875 115 99 1 1
## 11876 116 99 1 1
## 11877 117 99 1 1
## 11878 118 99 1 1
## 11879 119 99 1 1
## 11880 120 99 1 1
## 11881 1 100 1 1
## 11882 2 100 1 1
## 11883 3 100 1 1
## 11884 4 100 1 1
## 11885 5 100 1 1
## 11886 6 100 1 1
## 11887 7 100 1 1
## 11888 8 100 1 1
## 11889 9 100 1 1
## 11890 10 100 1 1
## 11891 11 100 1 1
## 11892 12 100 1 1
## 11893 13 100 1 1
## 11894 14 100 1 1
## 11895 15 100 1 1
## 11896 16 100 1 1
## 11897 17 100 1 1
## 11898 18 100 1 1
## 11899 19 100 1 1
## 11900 20 100 1 1
## 11901 21 100 1 1
## 11902 22 100 1 1
## 11903 23 100 1 1
## 11904 24 100 1 1
## 11905 25 100 1 1
## 11906 26 100 1 1
## 11907 27 100 1 1
## 11908 28 100 1 1
## 11909 29 100 1 1
## 11910 30 100 1 1
## 11911 31 100 1 1
## 11912 32 100 1 1
## 11913 33 100 1 1
## 11914 34 100 1 1
## 11915 35 100 1 1
## 11916 36 100 1 1
## 11917 37 100 1 1
## 11918 38 100 1 1
## 11919 39 100 1 1
## 11920 40 100 1 1
## 11921 41 100 1 1
## 11922 42 100 1 1
## 11923 43 100 1 1
## 11924 44 100 1 1
## 11925 45 100 1 1
## 11926 46 100 1 1
## 11927 47 100 1 1
## 11928 48 100 1 1
## 11929 49 100 1 1
## 11930 50 100 1 1
## 11931 51 100 1 1
## 11932 52 100 1 1
## 11933 53 100 1 1
## 11934 54 100 1 1
## 11935 55 100 1 1
## 11936 56 100 1 1
## 11937 57 100 1 1
## 11938 58 100 1 1
## 11939 59 100 1 1
## 11940 60 100 1 1
## 11941 61 100 1 1
## 11942 62 100 1 1
## 11943 63 100 1 1
## 11944 64 100 1 1
## 11945 65 100 1 1
## 11946 66 100 1 1
## 11947 67 100 1 1
## 11948 68 100 1 1
## 11949 69 100 1 1
## 11950 70 100 1 1
## 11951 71 100 1 1
## 11952 72 100 1 1
## 11953 73 100 1 1
## 11954 74 100 1 1
## 11955 75 100 1 1
## 11956 76 100 1 1
## 11957 77 100 1 1
## 11958 78 100 1 1
## 11959 79 100 1 1
## 11960 80 100 1 1
## 11961 81 100 1 1
## 11962 82 100 1 1
## 11963 83 100 1 1
## 11964 84 100 1 1
## 11965 85 100 1 1
## 11966 86 100 1 1
## 11967 87 100 1 1
## 11968 88 100 1 1
## 11969 89 100 1 1
## 11970 90 100 1 1
## 11971 91 100 1 1
## 11972 92 100 1 1
## 11973 93 100 1 1
## 11974 94 100 1 1
## 11975 95 100 1 1
## 11976 96 100 1 1
## 11977 97 100 1 1
## 11978 98 100 1 1
## 11979 99 100 1 1
## 11980 100 100 1 1
## 11981 101 100 1 1
## 11982 102 100 1 1
## 11983 103 100 1 1
## 11984 104 100 1 1
## 11985 105 100 1 1
## 11986 106 100 1 1
## 11987 107 100 1 1
## 11988 108 100 1 1
## 11989 109 100 1 1
## 11990 110 100 1 1
## 11991 111 100 1 1
## 11992 112 100 1 1
## 11993 113 100 1 1
## 11994 114 100 1 1
## 11995 115 100 1 1
## 11996 116 100 1 1
## 11997 117 100 1 1
## 11998 118 100 1 1
## 11999 119 100 1 1
## 12000 120 100 1 1
## 12001 1 101 1 1
## 12002 2 101 1 1
## 12003 3 101 1 1
## 12004 4 101 1 1
## 12005 5 101 1 1
## 12006 6 101 1 1
## 12007 7 101 1 1
## 12008 8 101 1 1
## 12009 9 101 1 1
## 12010 10 101 1 1
## 12011 11 101 1 1
## 12012 12 101 1 1
## 12013 13 101 1 1
## 12014 14 101 1 1
## 12015 15 101 1 1
## 12016 16 101 1 1
## 12017 17 101 1 1
## 12018 18 101 1 1
## 12019 19 101 1 1
## 12020 20 101 1 1
## 12021 21 101 1 1
## 12022 22 101 1 1
## 12023 23 101 1 1
## 12024 24 101 1 1
## 12025 25 101 1 1
## 12026 26 101 1 1
## 12027 27 101 1 1
## 12028 28 101 1 1
## 12029 29 101 1 1
## 12030 30 101 1 1
## 12031 31 101 1 1
## 12032 32 101 1 1
## 12033 33 101 1 1
## 12034 34 101 1 1
## 12035 35 101 1 1
## 12036 36 101 1 1
## 12037 37 101 1 1
## 12038 38 101 1 1
## 12039 39 101 1 1
## 12040 40 101 1 1
## 12041 41 101 1 1
## 12042 42 101 1 1
## 12043 43 101 1 1
## 12044 44 101 1 1
## 12045 45 101 1 1
## 12046 46 101 1 1
## 12047 47 101 1 1
## 12048 48 101 1 1
## 12049 49 101 1 1
## 12050 50 101 1 1
## 12051 51 101 1 1
## 12052 52 101 1 1
## 12053 53 101 1 1
## 12054 54 101 1 1
## 12055 55 101 1 1
## 12056 56 101 1 1
## 12057 57 101 1 1
## 12058 58 101 1 1
## 12059 59 101 1 1
## 12060 60 101 1 1
## 12061 61 101 1 1
## 12062 62 101 1 1
## 12063 63 101 1 1
## 12064 64 101 1 1
## 12065 65 101 1 1
## 12066 66 101 1 1
## 12067 67 101 1 1
## 12068 68 101 1 1
## 12069 69 101 1 1
## 12070 70 101 1 1
## 12071 71 101 1 1
## 12072 72 101 1 1
## 12073 73 101 1 1
## 12074 74 101 1 1
## 12075 75 101 1 1
## 12076 76 101 1 1
## 12077 77 101 1 1
## 12078 78 101 1 1
## 12079 79 101 1 1
## 12080 80 101 1 1
## 12081 81 101 1 1
## 12082 82 101 1 1
## 12083 83 101 1 1
## 12084 84 101 1 1
## 12085 85 101 1 1
## 12086 86 101 1 1
## 12087 87 101 1 1
## 12088 88 101 1 1
## 12089 89 101 1 1
## 12090 90 101 1 1
## 12091 91 101 1 1
## 12092 92 101 1 1
## 12093 93 101 1 1
## 12094 94 101 1 1
## 12095 95 101 1 1
## 12096 96 101 1 1
## 12097 97 101 1 1
## 12098 98 101 1 1
## 12099 99 101 1 1
## 12100 100 101 1 1
## 12101 101 101 1 1
## 12102 102 101 1 1
## 12103 103 101 1 1
## 12104 104 101 1 1
## 12105 105 101 1 1
## 12106 106 101 1 1
## 12107 107 101 1 1
## 12108 108 101 1 1
## 12109 109 101 1 1
## 12110 110 101 1 1
## 12111 111 101 1 1
## 12112 112 101 1 1
## 12113 113 101 1 1
## 12114 114 101 1 1
## 12115 115 101 1 1
## 12116 116 101 1 1
## 12117 117 101 1 1
## 12118 118 101 1 1
## 12119 119 101 1 1
## 12120 120 101 1 1
## 12121 1 102 1 1
## 12122 2 102 1 1
## 12123 3 102 1 1
## 12124 4 102 1 1
## 12125 5 102 1 1
## 12126 6 102 1 1
## 12127 7 102 1 1
## 12128 8 102 1 1
## 12129 9 102 1 1
## 12130 10 102 1 1
## 12131 11 102 1 1
## 12132 12 102 1 1
## 12133 13 102 1 1
## 12134 14 102 1 1
## 12135 15 102 1 1
## 12136 16 102 1 1
## 12137 17 102 1 1
## 12138 18 102 1 1
## 12139 19 102 1 1
## 12140 20 102 1 1
## 12141 21 102 1 1
## 12142 22 102 1 1
## 12143 23 102 1 1
## 12144 24 102 1 1
## 12145 25 102 1 1
## 12146 26 102 1 1
## 12147 27 102 1 1
## 12148 28 102 1 1
## 12149 29 102 1 1
## 12150 30 102 1 1
## 12151 31 102 1 1
## 12152 32 102 1 1
## 12153 33 102 1 1
## 12154 34 102 1 1
## 12155 35 102 1 1
## 12156 36 102 1 1
## 12157 37 102 1 1
## 12158 38 102 1 1
## 12159 39 102 1 1
## 12160 40 102 1 1
## 12161 41 102 1 1
## 12162 42 102 1 1
## 12163 43 102 1 1
## 12164 44 102 1 1
## 12165 45 102 1 1
## 12166 46 102 1 1
## 12167 47 102 1 1
## 12168 48 102 1 1
## 12169 49 102 1 1
## 12170 50 102 1 1
## 12171 51 102 1 1
## 12172 52 102 1 1
## 12173 53 102 1 1
## 12174 54 102 1 1
## 12175 55 102 1 1
## 12176 56 102 1 1
## 12177 57 102 1 1
## 12178 58 102 1 1
## 12179 59 102 1 1
## 12180 60 102 1 1
## 12181 61 102 1 1
## 12182 62 102 1 1
## 12183 63 102 1 1
## 12184 64 102 1 1
## 12185 65 102 1 1
## 12186 66 102 1 1
## 12187 67 102 1 1
## 12188 68 102 1 1
## 12189 69 102 1 1
## 12190 70 102 1 1
## 12191 71 102 1 1
## 12192 72 102 1 1
## 12193 73 102 1 1
## 12194 74 102 1 1
## 12195 75 102 1 1
## 12196 76 102 1 1
## 12197 77 102 1 1
## 12198 78 102 1 1
## 12199 79 102 1 1
## 12200 80 102 1 1
## 12201 81 102 1 1
## 12202 82 102 1 1
## 12203 83 102 1 1
## 12204 84 102 1 1
## 12205 85 102 1 1
## 12206 86 102 1 1
## 12207 87 102 1 1
## 12208 88 102 1 1
## 12209 89 102 1 1
## 12210 90 102 1 1
## 12211 91 102 1 1
## 12212 92 102 1 1
## 12213 93 102 1 1
## 12214 94 102 1 1
## 12215 95 102 1 1
## 12216 96 102 1 1
## 12217 97 102 1 1
## 12218 98 102 1 1
## 12219 99 102 1 1
## 12220 100 102 1 1
## 12221 101 102 1 1
## 12222 102 102 1 1
## 12223 103 102 1 1
## 12224 104 102 1 1
## 12225 105 102 1 1
## 12226 106 102 1 1
## 12227 107 102 1 1
## 12228 108 102 1 1
## 12229 109 102 1 1
## 12230 110 102 1 1
## 12231 111 102 1 1
## 12232 112 102 1 1
## 12233 113 102 1 1
## 12234 114 102 1 1
## 12235 115 102 1 1
## 12236 116 102 1 1
## 12237 117 102 1 1
## 12238 118 102 1 1
## 12239 119 102 1 1
## 12240 120 102 1 1
## 12241 1 103 1 1
## 12242 2 103 1 1
## 12243 3 103 1 1
## 12244 4 103 1 1
## 12245 5 103 1 1
## 12246 6 103 1 1
## 12247 7 103 1 1
## 12248 8 103 1 1
## 12249 9 103 1 1
## 12250 10 103 1 1
## 12251 11 103 1 1
## 12252 12 103 1 1
## 12253 13 103 1 1
## 12254 14 103 1 1
## 12255 15 103 1 1
## 12256 16 103 1 1
## 12257 17 103 1 1
## 12258 18 103 1 1
## 12259 19 103 1 1
## 12260 20 103 1 1
## 12261 21 103 1 1
## 12262 22 103 1 1
## 12263 23 103 1 1
## 12264 24 103 1 1
## 12265 25 103 1 1
## 12266 26 103 1 1
## 12267 27 103 1 1
## 12268 28 103 1 1
## 12269 29 103 1 1
## 12270 30 103 1 1
## 12271 31 103 1 1
## 12272 32 103 1 1
## 12273 33 103 1 1
## 12274 34 103 1 1
## 12275 35 103 1 1
## 12276 36 103 1 1
## 12277 37 103 1 1
## 12278 38 103 1 1
## 12279 39 103 1 1
## 12280 40 103 1 1
## 12281 41 103 1 1
## 12282 42 103 1 1
## 12283 43 103 1 1
## 12284 44 103 1 1
## 12285 45 103 1 1
## 12286 46 103 1 1
## 12287 47 103 1 1
## 12288 48 103 1 1
## 12289 49 103 1 1
## 12290 50 103 1 1
## 12291 51 103 1 1
## 12292 52 103 1 1
## 12293 53 103 1 1
## 12294 54 103 1 1
## 12295 55 103 1 1
## 12296 56 103 1 1
## 12297 57 103 1 1
## 12298 58 103 1 1
## 12299 59 103 1 1
## 12300 60 103 1 1
## 12301 61 103 1 1
## 12302 62 103 1 1
## 12303 63 103 1 1
## 12304 64 103 1 1
## 12305 65 103 1 1
## 12306 66 103 1 1
## 12307 67 103 1 1
## 12308 68 103 1 1
## 12309 69 103 1 1
## 12310 70 103 1 1
## 12311 71 103 1 1
## 12312 72 103 1 1
## 12313 73 103 1 1
## 12314 74 103 1 1
## 12315 75 103 1 1
## 12316 76 103 1 1
## 12317 77 103 1 1
## 12318 78 103 1 1
## 12319 79 103 1 1
## 12320 80 103 1 1
## 12321 81 103 1 1
## 12322 82 103 1 1
## 12323 83 103 1 1
## 12324 84 103 1 1
## 12325 85 103 1 1
## 12326 86 103 1 1
## 12327 87 103 1 1
## 12328 88 103 1 1
## 12329 89 103 1 1
## 12330 90 103 1 1
## 12331 91 103 1 1
## 12332 92 103 1 1
## 12333 93 103 1 1
## 12334 94 103 1 1
## 12335 95 103 1 1
## 12336 96 103 1 1
## 12337 97 103 1 1
## 12338 98 103 1 1
## 12339 99 103 1 1
## 12340 100 103 1 1
## 12341 101 103 1 1
## 12342 102 103 1 1
## 12343 103 103 1 1
## 12344 104 103 1 1
## 12345 105 103 1 1
## 12346 106 103 1 1
## 12347 107 103 1 1
## 12348 108 103 1 1
## 12349 109 103 1 1
## 12350 110 103 1 1
## 12351 111 103 1 1
## 12352 112 103 1 1
## 12353 113 103 1 1
## 12354 114 103 1 1
## 12355 115 103 1 1
## 12356 116 103 1 1
## 12357 117 103 1 1
## 12358 118 103 1 1
## 12359 119 103 1 1
## 12360 120 103 1 1
## 12361 1 104 1 1
## 12362 2 104 1 1
## 12363 3 104 1 1
## 12364 4 104 1 1
## 12365 5 104 1 1
## 12366 6 104 1 1
## 12367 7 104 1 1
## 12368 8 104 1 1
## 12369 9 104 1 1
## 12370 10 104 1 1
## 12371 11 104 1 1
## 12372 12 104 1 1
## 12373 13 104 1 1
## 12374 14 104 1 1
## 12375 15 104 1 1
## 12376 16 104 1 1
## 12377 17 104 1 1
## 12378 18 104 1 1
## 12379 19 104 1 1
## 12380 20 104 1 1
## 12381 21 104 1 1
## 12382 22 104 1 1
## 12383 23 104 1 1
## 12384 24 104 1 1
## 12385 25 104 1 1
## 12386 26 104 1 1
## 12387 27 104 1 1
## 12388 28 104 1 1
## 12389 29 104 1 1
## 12390 30 104 1 1
## 12391 31 104 1 1
## 12392 32 104 1 1
## 12393 33 104 1 1
## 12394 34 104 1 1
## 12395 35 104 1 1
## 12396 36 104 1 1
## 12397 37 104 1 1
## 12398 38 104 1 1
## 12399 39 104 1 1
## 12400 40 104 1 1
## 12401 41 104 1 1
## 12402 42 104 1 1
## 12403 43 104 1 1
## 12404 44 104 1 1
## 12405 45 104 1 1
## 12406 46 104 1 1
## 12407 47 104 1 1
## 12408 48 104 1 1
## 12409 49 104 1 1
## 12410 50 104 1 1
## 12411 51 104 1 1
## 12412 52 104 1 1
## 12413 53 104 1 1
## 12414 54 104 1 1
## 12415 55 104 1 1
## 12416 56 104 1 1
## 12417 57 104 1 1
## 12418 58 104 1 1
## 12419 59 104 1 1
## 12420 60 104 1 1
## 12421 61 104 1 1
## 12422 62 104 1 1
## 12423 63 104 1 1
## 12424 64 104 1 1
## 12425 65 104 1 1
## 12426 66 104 1 1
## 12427 67 104 1 1
## 12428 68 104 1 1
## 12429 69 104 1 1
## 12430 70 104 1 1
## 12431 71 104 1 1
## 12432 72 104 1 1
## 12433 73 104 1 1
## 12434 74 104 1 1
## 12435 75 104 1 1
## 12436 76 104 1 1
## 12437 77 104 1 1
## 12438 78 104 1 1
## 12439 79 104 1 1
## 12440 80 104 1 1
## 12441 81 104 1 1
## 12442 82 104 1 1
## 12443 83 104 1 1
## 12444 84 104 1 1
## 12445 85 104 1 1
## 12446 86 104 1 1
## 12447 87 104 1 1
## 12448 88 104 1 1
## 12449 89 104 1 1
## 12450 90 104 1 1
## 12451 91 104 1 1
## 12452 92 104 1 1
## 12453 93 104 1 1
## 12454 94 104 1 1
## 12455 95 104 1 1
## 12456 96 104 1 1
## 12457 97 104 1 1
## 12458 98 104 1 1
## 12459 99 104 1 1
## 12460 100 104 1 1
## 12461 101 104 1 1
## 12462 102 104 1 1
## 12463 103 104 1 1
## 12464 104 104 1 1
## 12465 105 104 1 1
## 12466 106 104 1 1
## 12467 107 104 1 1
## 12468 108 104 1 1
## 12469 109 104 1 1
## 12470 110 104 1 1
## 12471 111 104 1 1
## 12472 112 104 1 1
## 12473 113 104 1 1
## 12474 114 104 1 1
## 12475 115 104 1 1
## 12476 116 104 1 1
## 12477 117 104 1 1
## 12478 118 104 1 1
## 12479 119 104 1 1
## 12480 120 104 1 1
## 12481 1 105 1 1
## 12482 2 105 1 1
## 12483 3 105 1 1
## 12484 4 105 1 1
## 12485 5 105 1 1
## 12486 6 105 1 1
## 12487 7 105 1 1
## 12488 8 105 1 1
## 12489 9 105 1 1
## 12490 10 105 1 1
## 12491 11 105 1 1
## 12492 12 105 1 1
## 12493 13 105 1 1
## 12494 14 105 1 1
## 12495 15 105 1 1
## 12496 16 105 1 1
## 12497 17 105 1 1
## 12498 18 105 1 1
## 12499 19 105 1 1
## 12500 20 105 1 1
## 12501 21 105 1 1
## 12502 22 105 1 1
## 12503 23 105 1 1
## 12504 24 105 1 1
## 12505 25 105 1 1
## 12506 26 105 1 1
## 12507 27 105 1 1
## 12508 28 105 1 1
## 12509 29 105 1 1
## 12510 30 105 1 1
## 12511 31 105 1 1
## 12512 32 105 1 1
## 12513 33 105 1 1
## 12514 34 105 1 1
## 12515 35 105 1 1
## 12516 36 105 1 1
## 12517 37 105 1 1
## 12518 38 105 1 1
## 12519 39 105 1 1
## 12520 40 105 1 1
## 12521 41 105 1 1
## 12522 42 105 1 1
## 12523 43 105 1 1
## 12524 44 105 1 1
## 12525 45 105 1 1
## 12526 46 105 1 1
## 12527 47 105 1 1
## 12528 48 105 1 1
## 12529 49 105 1 1
## 12530 50 105 1 1
## 12531 51 105 1 1
## 12532 52 105 1 1
## 12533 53 105 1 1
## 12534 54 105 1 1
## 12535 55 105 1 1
## 12536 56 105 1 1
## 12537 57 105 1 1
## 12538 58 105 1 1
## 12539 59 105 1 1
## 12540 60 105 1 1
## 12541 61 105 1 1
## 12542 62 105 1 1
## 12543 63 105 1 1
## 12544 64 105 1 1
## 12545 65 105 1 1
## 12546 66 105 1 1
## 12547 67 105 1 1
## 12548 68 105 1 1
## 12549 69 105 1 1
## 12550 70 105 1 1
## 12551 71 105 1 1
## 12552 72 105 1 1
## 12553 73 105 1 1
## 12554 74 105 1 1
## 12555 75 105 1 1
## 12556 76 105 1 1
## 12557 77 105 1 1
## 12558 78 105 1 1
## 12559 79 105 1 1
## 12560 80 105 1 1
## 12561 81 105 1 1
## 12562 82 105 1 1
## 12563 83 105 1 1
## 12564 84 105 1 1
## 12565 85 105 1 1
## 12566 86 105 1 1
## 12567 87 105 1 1
## 12568 88 105 1 1
## 12569 89 105 1 1
## 12570 90 105 1 1
## 12571 91 105 1 1
## 12572 92 105 1 1
## 12573 93 105 1 1
## 12574 94 105 1 1
## 12575 95 105 1 1
## 12576 96 105 1 1
## 12577 97 105 1 1
## 12578 98 105 1 1
## 12579 99 105 1 1
## 12580 100 105 1 1
## 12581 101 105 1 1
## 12582 102 105 1 1
## 12583 103 105 1 1
## 12584 104 105 1 1
## 12585 105 105 1 1
## 12586 106 105 1 1
## 12587 107 105 1 1
## 12588 108 105 1 1
## 12589 109 105 1 1
## 12590 110 105 1 1
## 12591 111 105 1 1
## 12592 112 105 1 1
## 12593 113 105 1 1
## 12594 114 105 1 1
## 12595 115 105 1 1
## 12596 116 105 1 1
## 12597 117 105 1 1
## 12598 118 105 1 1
## 12599 119 105 1 1
## 12600 120 105 1 1
## 12601 1 106 1 1
## 12602 2 106 1 1
## 12603 3 106 1 1
## 12604 4 106 1 1
## 12605 5 106 1 1
## 12606 6 106 1 1
## 12607 7 106 1 1
## 12608 8 106 1 1
## 12609 9 106 1 1
## 12610 10 106 1 1
## 12611 11 106 1 1
## 12612 12 106 1 1
## 12613 13 106 1 1
## 12614 14 106 1 1
## 12615 15 106 1 1
## 12616 16 106 1 1
## 12617 17 106 1 1
## 12618 18 106 1 1
## 12619 19 106 1 1
## 12620 20 106 1 1
## 12621 21 106 1 1
## 12622 22 106 1 1
## 12623 23 106 1 1
## 12624 24 106 1 1
## 12625 25 106 1 1
## 12626 26 106 1 1
## 12627 27 106 1 1
## 12628 28 106 1 1
## 12629 29 106 1 1
## 12630 30 106 1 1
## 12631 31 106 1 1
## 12632 32 106 1 1
## 12633 33 106 1 1
## 12634 34 106 1 1
## 12635 35 106 1 1
## 12636 36 106 1 1
## 12637 37 106 1 1
## 12638 38 106 1 1
## 12639 39 106 1 1
## 12640 40 106 1 1
## 12641 41 106 1 1
## 12642 42 106 1 1
## 12643 43 106 1 1
## 12644 44 106 1 1
## 12645 45 106 1 1
## 12646 46 106 1 1
## 12647 47 106 1 1
## 12648 48 106 1 1
## 12649 49 106 1 1
## 12650 50 106 1 1
## 12651 51 106 1 1
## 12652 52 106 1 1
## 12653 53 106 1 1
## 12654 54 106 1 1
## 12655 55 106 1 1
## 12656 56 106 1 1
## 12657 57 106 1 1
## 12658 58 106 1 1
## 12659 59 106 1 1
## 12660 60 106 1 1
## 12661 61 106 1 1
## 12662 62 106 1 1
## 12663 63 106 1 1
## 12664 64 106 1 1
## 12665 65 106 1 1
## 12666 66 106 1 1
## 12667 67 106 1 1
## 12668 68 106 1 1
## 12669 69 106 1 1
## 12670 70 106 1 1
## 12671 71 106 1 1
## 12672 72 106 1 1
## 12673 73 106 1 1
## 12674 74 106 1 1
## 12675 75 106 1 1
## 12676 76 106 1 1
## 12677 77 106 1 1
## 12678 78 106 1 1
## 12679 79 106 1 1
## 12680 80 106 1 1
## 12681 81 106 1 1
## 12682 82 106 1 1
## 12683 83 106 1 1
## 12684 84 106 1 1
## 12685 85 106 1 1
## 12686 86 106 1 1
## 12687 87 106 1 1
## 12688 88 106 1 1
## 12689 89 106 1 1
## 12690 90 106 1 1
## 12691 91 106 1 1
## 12692 92 106 1 1
## 12693 93 106 1 1
## 12694 94 106 1 1
## 12695 95 106 1 1
## 12696 96 106 1 1
## 12697 97 106 1 1
## 12698 98 106 1 1
## 12699 99 106 1 1
## 12700 100 106 1 1
## 12701 101 106 1 1
## 12702 102 106 1 1
## 12703 103 106 1 1
## 12704 104 106 1 1
## 12705 105 106 1 1
## 12706 106 106 1 1
## 12707 107 106 1 1
## 12708 108 106 1 1
## 12709 109 106 1 1
## 12710 110 106 1 1
## 12711 111 106 1 1
## 12712 112 106 1 1
## 12713 113 106 1 1
## 12714 114 106 1 1
## 12715 115 106 1 1
## 12716 116 106 1 1
## 12717 117 106 1 1
## 12718 118 106 1 1
## 12719 119 106 1 1
## 12720 120 106 1 1
## 12721 1 107 1 1
## 12722 2 107 1 1
## 12723 3 107 1 1
## 12724 4 107 1 1
## 12725 5 107 1 1
## 12726 6 107 1 1
## 12727 7 107 1 1
## 12728 8 107 1 1
## 12729 9 107 1 1
## 12730 10 107 1 1
## 12731 11 107 1 1
## 12732 12 107 1 1
## 12733 13 107 1 1
## 12734 14 107 1 1
## 12735 15 107 1 1
## 12736 16 107 1 1
## 12737 17 107 1 1
## 12738 18 107 1 1
## 12739 19 107 1 1
## 12740 20 107 1 1
## 12741 21 107 1 1
## 12742 22 107 1 1
## 12743 23 107 1 1
## 12744 24 107 1 1
## 12745 25 107 1 1
## 12746 26 107 1 1
## 12747 27 107 1 1
## 12748 28 107 1 1
## 12749 29 107 1 1
## 12750 30 107 1 1
## 12751 31 107 1 1
## 12752 32 107 1 1
## 12753 33 107 1 1
## 12754 34 107 1 1
## 12755 35 107 1 1
## 12756 36 107 1 1
## 12757 37 107 1 1
## 12758 38 107 1 1
## 12759 39 107 1 1
## 12760 40 107 1 1
## 12761 41 107 1 1
## 12762 42 107 1 1
## 12763 43 107 1 1
## 12764 44 107 1 1
## 12765 45 107 1 1
## 12766 46 107 1 1
## 12767 47 107 1 1
## 12768 48 107 1 1
## 12769 49 107 1 1
## 12770 50 107 1 1
## 12771 51 107 1 1
## 12772 52 107 1 1
## 12773 53 107 1 1
## 12774 54 107 1 1
## 12775 55 107 1 1
## 12776 56 107 1 1
## 12777 57 107 1 1
## 12778 58 107 1 1
## 12779 59 107 1 1
## 12780 60 107 1 1
## 12781 61 107 1 1
## 12782 62 107 1 1
## 12783 63 107 1 1
## 12784 64 107 1 1
## 12785 65 107 1 1
## 12786 66 107 1 1
## 12787 67 107 1 1
## 12788 68 107 1 1
## 12789 69 107 1 1
## 12790 70 107 1 1
## 12791 71 107 1 1
## 12792 72 107 1 1
## 12793 73 107 1 1
## 12794 74 107 1 1
## 12795 75 107 1 1
## 12796 76 107 1 1
## 12797 77 107 1 1
## 12798 78 107 1 1
## 12799 79 107 1 1
## 12800 80 107 1 1
## 12801 81 107 1 1
## 12802 82 107 1 1
## 12803 83 107 1 1
## 12804 84 107 1 1
## 12805 85 107 1 1
## 12806 86 107 1 1
## 12807 87 107 1 1
## 12808 88 107 1 1
## 12809 89 107 1 1
## 12810 90 107 1 1
## 12811 91 107 1 1
## 12812 92 107 1 1
## 12813 93 107 1 1
## 12814 94 107 1 1
## 12815 95 107 1 1
## 12816 96 107 1 1
## 12817 97 107 1 1
## 12818 98 107 1 1
## 12819 99 107 1 1
## 12820 100 107 1 1
## 12821 101 107 1 1
## 12822 102 107 1 1
## 12823 103 107 1 1
## 12824 104 107 1 1
## 12825 105 107 1 1
## 12826 106 107 1 1
## 12827 107 107 1 1
## 12828 108 107 1 1
## 12829 109 107 1 1
## 12830 110 107 1 1
## 12831 111 107 1 1
## 12832 112 107 1 1
## 12833 113 107 1 1
## 12834 114 107 1 1
## 12835 115 107 1 1
## 12836 116 107 1 1
## 12837 117 107 1 1
## 12838 118 107 1 1
## 12839 119 107 1 1
## 12840 120 107 1 1
## 12841 1 108 1 1
## 12842 2 108 1 1
## 12843 3 108 1 1
## 12844 4 108 1 1
## 12845 5 108 1 1
## 12846 6 108 1 1
## 12847 7 108 1 1
## 12848 8 108 1 1
## 12849 9 108 1 1
## 12850 10 108 1 1
## 12851 11 108 1 1
## 12852 12 108 1 1
## 12853 13 108 1 1
## 12854 14 108 1 1
## 12855 15 108 1 1
## 12856 16 108 1 1
## 12857 17 108 1 1
## 12858 18 108 1 1
## 12859 19 108 1 1
## 12860 20 108 1 1
## 12861 21 108 1 1
## 12862 22 108 1 1
## 12863 23 108 1 1
## 12864 24 108 1 1
## 12865 25 108 1 1
## 12866 26 108 1 1
## 12867 27 108 1 1
## 12868 28 108 1 1
## 12869 29 108 1 1
## 12870 30 108 1 1
## 12871 31 108 1 1
## 12872 32 108 1 1
## 12873 33 108 1 1
## 12874 34 108 1 1
## 12875 35 108 1 1
## 12876 36 108 1 1
## 12877 37 108 1 1
## 12878 38 108 1 1
## 12879 39 108 1 1
## 12880 40 108 1 1
## 12881 41 108 1 1
## 12882 42 108 1 1
## 12883 43 108 1 1
## 12884 44 108 1 1
## 12885 45 108 1 1
## 12886 46 108 1 1
## 12887 47 108 1 1
## 12888 48 108 1 1
## 12889 49 108 1 1
## 12890 50 108 1 1
## 12891 51 108 1 1
## 12892 52 108 1 1
## 12893 53 108 1 1
## 12894 54 108 1 1
## 12895 55 108 1 1
## 12896 56 108 1 1
## 12897 57 108 1 1
## 12898 58 108 1 1
## 12899 59 108 1 1
## 12900 60 108 1 1
## 12901 61 108 1 1
## 12902 62 108 1 1
## 12903 63 108 1 1
## 12904 64 108 1 1
## 12905 65 108 1 1
## 12906 66 108 1 1
## 12907 67 108 1 1
## 12908 68 108 1 1
## 12909 69 108 1 1
## 12910 70 108 1 1
## 12911 71 108 1 1
## 12912 72 108 1 1
## 12913 73 108 1 1
## 12914 74 108 1 1
## 12915 75 108 1 1
## 12916 76 108 1 1
## 12917 77 108 1 1
## 12918 78 108 1 1
## 12919 79 108 1 1
## 12920 80 108 1 1
## 12921 81 108 1 1
## 12922 82 108 1 1
## 12923 83 108 1 1
## 12924 84 108 1 1
## 12925 85 108 1 1
## 12926 86 108 1 1
## 12927 87 108 1 1
## 12928 88 108 1 1
## 12929 89 108 1 1
## 12930 90 108 1 1
## 12931 91 108 1 1
## 12932 92 108 1 1
## 12933 93 108 1 1
## 12934 94 108 1 1
## 12935 95 108 1 1
## 12936 96 108 1 1
## 12937 97 108 1 1
## 12938 98 108 1 1
## 12939 99 108 1 1
## 12940 100 108 1 1
## 12941 101 108 1 1
## 12942 102 108 1 1
## 12943 103 108 1 1
## 12944 104 108 1 1
## 12945 105 108 1 1
## 12946 106 108 1 1
## 12947 107 108 1 1
## 12948 108 108 1 1
## 12949 109 108 1 1
## 12950 110 108 1 1
## 12951 111 108 1 1
## 12952 112 108 1 1
## 12953 113 108 1 1
## 12954 114 108 1 1
## 12955 115 108 1 1
## 12956 116 108 1 1
## 12957 117 108 1 1
## 12958 118 108 1 1
## 12959 119 108 1 1
## 12960 120 108 1 1
## 12961 1 109 1 1
## 12962 2 109 1 1
## 12963 3 109 1 1
## 12964 4 109 1 1
## 12965 5 109 1 1
## 12966 6 109 1 1
## 12967 7 109 1 1
## 12968 8 109 1 1
## 12969 9 109 1 1
## 12970 10 109 1 1
## 12971 11 109 1 1
## 12972 12 109 1 1
## 12973 13 109 1 1
## 12974 14 109 1 1
## 12975 15 109 1 1
## 12976 16 109 1 1
## 12977 17 109 1 1
## 12978 18 109 1 1
## 12979 19 109 1 1
## 12980 20 109 1 1
## 12981 21 109 1 1
## 12982 22 109 1 1
## 12983 23 109 1 1
## 12984 24 109 1 1
## 12985 25 109 1 1
## 12986 26 109 1 1
## 12987 27 109 1 1
## 12988 28 109 1 1
## 12989 29 109 1 1
## 12990 30 109 1 1
## 12991 31 109 1 1
## 12992 32 109 1 1
## 12993 33 109 1 1
## 12994 34 109 1 1
## 12995 35 109 1 1
## 12996 36 109 1 1
## 12997 37 109 1 1
## 12998 38 109 1 1
## 12999 39 109 1 1
## 13000 40 109 1 1
## 13001 41 109 1 1
## 13002 42 109 1 1
## 13003 43 109 1 1
## 13004 44 109 1 1
## 13005 45 109 1 1
## 13006 46 109 1 1
## 13007 47 109 1 1
## 13008 48 109 1 1
## 13009 49 109 1 1
## 13010 50 109 1 1
## 13011 51 109 1 1
## 13012 52 109 1 1
## 13013 53 109 1 1
## 13014 54 109 1 1
## 13015 55 109 1 1
## 13016 56 109 1 1
## 13017 57 109 1 1
## 13018 58 109 1 1
## 13019 59 109 1 1
## 13020 60 109 1 1
## 13021 61 109 1 1
## 13022 62 109 1 1
## 13023 63 109 1 1
## 13024 64 109 1 1
## 13025 65 109 1 1
## 13026 66 109 1 1
## 13027 67 109 1 1
## 13028 68 109 1 1
## 13029 69 109 1 1
## 13030 70 109 1 1
## 13031 71 109 1 1
## 13032 72 109 1 1
## 13033 73 109 1 1
## 13034 74 109 1 1
## 13035 75 109 1 1
## 13036 76 109 1 1
## 13037 77 109 1 1
## 13038 78 109 1 1
## 13039 79 109 1 1
## 13040 80 109 1 1
## 13041 81 109 1 1
## 13042 82 109 1 1
## 13043 83 109 1 1
## 13044 84 109 1 1
## 13045 85 109 1 1
## 13046 86 109 1 1
## 13047 87 109 1 1
## 13048 88 109 1 1
## 13049 89 109 1 1
## 13050 90 109 1 1
## 13051 91 109 1 1
## 13052 92 109 1 1
## 13053 93 109 1 1
## 13054 94 109 1 1
## 13055 95 109 1 1
## 13056 96 109 1 1
## 13057 97 109 1 1
## 13058 98 109 1 1
## 13059 99 109 1 1
## 13060 100 109 1 1
## 13061 101 109 1 1
## 13062 102 109 1 1
## 13063 103 109 1 1
## 13064 104 109 1 1
## 13065 105 109 1 1
## 13066 106 109 1 1
## 13067 107 109 1 1
## 13068 108 109 1 1
## 13069 109 109 1 1
## 13070 110 109 1 1
## 13071 111 109 1 1
## 13072 112 109 1 1
## 13073 113 109 1 1
## 13074 114 109 1 1
## 13075 115 109 1 1
## 13076 116 109 1 1
## 13077 117 109 1 1
## 13078 118 109 1 1
## 13079 119 109 1 1
## 13080 120 109 1 1
## 13081 1 110 1 1
## 13082 2 110 1 1
## 13083 3 110 1 1
## 13084 4 110 1 1
## 13085 5 110 1 1
## 13086 6 110 1 1
## 13087 7 110 1 1
## 13088 8 110 1 1
## 13089 9 110 1 1
## 13090 10 110 1 1
## 13091 11 110 1 1
## 13092 12 110 1 1
## 13093 13 110 1 1
## 13094 14 110 1 1
## 13095 15 110 1 1
## 13096 16 110 1 1
## 13097 17 110 1 1
## 13098 18 110 1 1
## 13099 19 110 1 1
## 13100 20 110 1 1
## 13101 21 110 1 1
## 13102 22 110 1 1
## 13103 23 110 1 1
## 13104 24 110 1 1
## 13105 25 110 1 1
## 13106 26 110 1 1
## 13107 27 110 1 1
## 13108 28 110 1 1
## 13109 29 110 1 1
## 13110 30 110 1 1
## 13111 31 110 1 1
## 13112 32 110 1 1
## 13113 33 110 1 1
## 13114 34 110 1 1
## 13115 35 110 1 1
## 13116 36 110 1 1
## 13117 37 110 1 1
## 13118 38 110 1 1
## 13119 39 110 1 1
## 13120 40 110 1 1
## 13121 41 110 1 1
## 13122 42 110 1 1
## 13123 43 110 1 1
## 13124 44 110 1 1
## 13125 45 110 1 1
## 13126 46 110 1 1
## 13127 47 110 1 1
## 13128 48 110 1 1
## 13129 49 110 1 1
## 13130 50 110 1 1
## 13131 51 110 1 1
## 13132 52 110 1 1
## 13133 53 110 1 1
## 13134 54 110 1 1
## 13135 55 110 1 1
## 13136 56 110 1 1
## 13137 57 110 1 1
## 13138 58 110 1 1
## 13139 59 110 1 1
## 13140 60 110 1 1
## 13141 61 110 1 1
## 13142 62 110 1 1
## 13143 63 110 1 1
## 13144 64 110 1 1
## 13145 65 110 1 1
## 13146 66 110 1 1
## 13147 67 110 1 1
## 13148 68 110 1 1
## 13149 69 110 1 1
## 13150 70 110 1 1
## 13151 71 110 1 1
## 13152 72 110 1 1
## 13153 73 110 1 1
## 13154 74 110 1 1
## 13155 75 110 1 1
## 13156 76 110 1 1
## 13157 77 110 1 1
## 13158 78 110 1 1
## 13159 79 110 1 1
## 13160 80 110 1 1
## 13161 81 110 1 1
## 13162 82 110 1 1
## 13163 83 110 1 1
## 13164 84 110 1 1
## 13165 85 110 1 1
## 13166 86 110 1 1
## 13167 87 110 1 1
## 13168 88 110 1 1
## 13169 89 110 1 1
## 13170 90 110 1 1
## 13171 91 110 1 1
## 13172 92 110 1 1
## 13173 93 110 1 1
## 13174 94 110 1 1
## 13175 95 110 1 1
## 13176 96 110 1 1
## 13177 97 110 1 1
## 13178 98 110 1 1
## 13179 99 110 1 1
## 13180 100 110 1 1
## 13181 101 110 1 1
## 13182 102 110 1 1
## 13183 103 110 1 1
## 13184 104 110 1 1
## 13185 105 110 1 1
## 13186 106 110 1 1
## 13187 107 110 1 1
## 13188 108 110 1 1
## 13189 109 110 1 1
## 13190 110 110 1 1
## 13191 111 110 1 1
## 13192 112 110 1 1
## 13193 113 110 1 1
## 13194 114 110 1 1
## 13195 115 110 1 1
## 13196 116 110 1 1
## 13197 117 110 1 1
## 13198 118 110 1 1
## 13199 119 110 1 1
## 13200 120 110 1 1
## 13201 1 111 1 1
## 13202 2 111 1 1
## 13203 3 111 1 1
## 13204 4 111 1 1
## 13205 5 111 1 1
## 13206 6 111 1 1
## 13207 7 111 1 1
## 13208 8 111 1 1
## 13209 9 111 1 1
## 13210 10 111 1 1
## 13211 11 111 1 1
## 13212 12 111 1 1
## 13213 13 111 1 1
## 13214 14 111 1 1
## 13215 15 111 1 1
## 13216 16 111 1 1
## 13217 17 111 1 1
## 13218 18 111 1 1
## 13219 19 111 1 1
## 13220 20 111 1 1
## 13221 21 111 1 1
## 13222 22 111 1 1
## 13223 23 111 1 1
## 13224 24 111 1 1
## 13225 25 111 1 1
## 13226 26 111 1 1
## 13227 27 111 1 1
## 13228 28 111 1 1
## 13229 29 111 1 1
## 13230 30 111 1 1
## 13231 31 111 1 1
## 13232 32 111 1 1
## 13233 33 111 1 1
## 13234 34 111 1 1
## 13235 35 111 1 1
## 13236 36 111 1 1
## 13237 37 111 1 1
## 13238 38 111 1 1
## 13239 39 111 1 1
## 13240 40 111 1 1
## 13241 41 111 1 1
## 13242 42 111 1 1
## 13243 43 111 1 1
## 13244 44 111 1 1
## 13245 45 111 1 1
## 13246 46 111 1 1
## 13247 47 111 1 1
## 13248 48 111 1 1
## 13249 49 111 1 1
## 13250 50 111 1 1
## 13251 51 111 1 1
## 13252 52 111 1 1
## 13253 53 111 1 1
## 13254 54 111 1 1
## 13255 55 111 1 1
## 13256 56 111 1 1
## 13257 57 111 1 1
## 13258 58 111 1 1
## 13259 59 111 1 1
## 13260 60 111 1 1
## 13261 61 111 1 1
## 13262 62 111 1 1
## 13263 63 111 1 1
## 13264 64 111 1 1
## 13265 65 111 1 1
## 13266 66 111 1 1
## 13267 67 111 1 1
## 13268 68 111 1 1
## 13269 69 111 1 1
## 13270 70 111 1 1
## 13271 71 111 1 1
## 13272 72 111 1 1
## 13273 73 111 1 1
## 13274 74 111 1 1
## 13275 75 111 1 1
## 13276 76 111 1 1
## 13277 77 111 1 1
## 13278 78 111 1 1
## 13279 79 111 1 1
## 13280 80 111 1 1
## 13281 81 111 1 1
## 13282 82 111 1 1
## 13283 83 111 1 1
## 13284 84 111 1 1
## 13285 85 111 1 1
## 13286 86 111 1 1
## 13287 87 111 1 1
## 13288 88 111 1 1
## 13289 89 111 1 1
## 13290 90 111 1 1
## 13291 91 111 1 1
## 13292 92 111 1 1
## 13293 93 111 1 1
## 13294 94 111 1 1
## 13295 95 111 1 1
## 13296 96 111 1 1
## 13297 97 111 1 1
## 13298 98 111 1 1
## 13299 99 111 1 1
## 13300 100 111 1 1
## 13301 101 111 1 1
## 13302 102 111 1 1
## 13303 103 111 1 1
## 13304 104 111 1 1
## 13305 105 111 1 1
## 13306 106 111 1 1
## 13307 107 111 1 1
## 13308 108 111 1 1
## 13309 109 111 1 1
## 13310 110 111 1 1
## 13311 111 111 1 1
## 13312 112 111 1 1
## 13313 113 111 1 1
## 13314 114 111 1 1
## 13315 115 111 1 1
## 13316 116 111 1 1
## 13317 117 111 1 1
## 13318 118 111 1 1
## 13319 119 111 1 1
## 13320 120 111 1 1
## 13321 1 112 1 1
## 13322 2 112 1 1
## 13323 3 112 1 1
## 13324 4 112 1 1
## 13325 5 112 1 1
## 13326 6 112 1 1
## 13327 7 112 1 1
## 13328 8 112 1 1
## 13329 9 112 1 1
## 13330 10 112 1 1
## 13331 11 112 1 1
## 13332 12 112 1 1
## 13333 13 112 1 1
## 13334 14 112 1 1
## 13335 15 112 1 1
## 13336 16 112 1 1
## 13337 17 112 1 1
## 13338 18 112 1 1
## 13339 19 112 1 1
## 13340 20 112 1 1
## 13341 21 112 1 1
## 13342 22 112 1 1
## 13343 23 112 1 1
## 13344 24 112 1 1
## 13345 25 112 1 1
## 13346 26 112 1 1
## 13347 27 112 1 1
## 13348 28 112 1 1
## 13349 29 112 1 1
## 13350 30 112 1 1
## 13351 31 112 1 1
## 13352 32 112 1 1
## 13353 33 112 1 1
## 13354 34 112 1 1
## 13355 35 112 1 1
## 13356 36 112 1 1
## 13357 37 112 1 1
## 13358 38 112 1 1
## 13359 39 112 1 1
## 13360 40 112 1 1
## 13361 41 112 1 1
## 13362 42 112 1 1
## 13363 43 112 1 1
## 13364 44 112 1 1
## 13365 45 112 1 1
## 13366 46 112 1 1
## 13367 47 112 1 1
## 13368 48 112 1 1
## 13369 49 112 1 1
## 13370 50 112 1 1
## 13371 51 112 1 1
## 13372 52 112 1 1
## 13373 53 112 1 1
## 13374 54 112 1 1
## 13375 55 112 1 1
## 13376 56 112 1 1
## 13377 57 112 1 1
## 13378 58 112 1 1
## 13379 59 112 1 1
## 13380 60 112 1 1
## 13381 61 112 1 1
## 13382 62 112 1 1
## 13383 63 112 1 1
## 13384 64 112 1 1
## 13385 65 112 1 1
## 13386 66 112 1 1
## 13387 67 112 1 1
## 13388 68 112 1 1
## 13389 69 112 1 1
## 13390 70 112 1 1
## 13391 71 112 1 1
## 13392 72 112 1 1
## 13393 73 112 1 1
## 13394 74 112 1 1
## 13395 75 112 1 1
## 13396 76 112 1 1
## 13397 77 112 1 1
## 13398 78 112 1 1
## 13399 79 112 1 1
## 13400 80 112 1 1
## 13401 81 112 1 1
## 13402 82 112 1 1
## 13403 83 112 1 1
## 13404 84 112 1 1
## 13405 85 112 1 1
## 13406 86 112 1 1
## 13407 87 112 1 1
## 13408 88 112 1 1
## 13409 89 112 1 1
## 13410 90 112 1 1
## 13411 91 112 1 1
## 13412 92 112 1 1
## 13413 93 112 1 1
## 13414 94 112 1 1
## 13415 95 112 1 1
## 13416 96 112 1 1
## 13417 97 112 1 1
## 13418 98 112 1 1
## 13419 99 112 1 1
## 13420 100 112 1 1
## 13421 101 112 1 1
## 13422 102 112 1 1
## 13423 103 112 1 1
## 13424 104 112 1 1
## 13425 105 112 1 1
## 13426 106 112 1 1
## 13427 107 112 1 1
## 13428 108 112 1 1
## 13429 109 112 1 1
## 13430 110 112 1 1
## 13431 111 112 1 1
## 13432 112 112 1 1
## 13433 113 112 1 1
## 13434 114 112 1 1
## 13435 115 112 1 1
## 13436 116 112 1 1
## 13437 117 112 1 1
## 13438 118 112 1 1
## 13439 119 112 1 1
## 13440 120 112 1 1
## 13441 1 113 1 1
## 13442 2 113 1 1
## 13443 3 113 1 1
## 13444 4 113 1 1
## 13445 5 113 1 1
## 13446 6 113 1 1
## 13447 7 113 1 1
## 13448 8 113 1 1
## 13449 9 113 1 1
## 13450 10 113 1 1
## 13451 11 113 1 1
## 13452 12 113 1 1
## 13453 13 113 1 1
## 13454 14 113 1 1
## 13455 15 113 1 1
## 13456 16 113 1 1
## 13457 17 113 1 1
## 13458 18 113 1 1
## 13459 19 113 1 1
## 13460 20 113 1 1
## 13461 21 113 1 1
## 13462 22 113 1 1
## 13463 23 113 1 1
## 13464 24 113 1 1
## 13465 25 113 1 1
## 13466 26 113 1 1
## 13467 27 113 1 1
## 13468 28 113 1 1
## 13469 29 113 1 1
## 13470 30 113 1 1
## 13471 31 113 1 1
## 13472 32 113 1 1
## 13473 33 113 1 1
## 13474 34 113 1 1
## 13475 35 113 1 1
## 13476 36 113 1 1
## 13477 37 113 1 1
## 13478 38 113 1 1
## 13479 39 113 1 1
## 13480 40 113 1 1
## 13481 41 113 1 1
## 13482 42 113 1 1
## 13483 43 113 1 1
## 13484 44 113 1 1
## 13485 45 113 1 1
## 13486 46 113 1 1
## 13487 47 113 1 1
## 13488 48 113 1 1
## 13489 49 113 1 1
## 13490 50 113 1 1
## 13491 51 113 1 1
## 13492 52 113 1 1
## 13493 53 113 1 1
## 13494 54 113 1 1
## 13495 55 113 1 1
## 13496 56 113 1 1
## 13497 57 113 1 1
## 13498 58 113 1 1
## 13499 59 113 1 1
## 13500 60 113 1 1
## 13501 61 113 1 1
## 13502 62 113 1 1
## 13503 63 113 1 1
## 13504 64 113 1 1
## 13505 65 113 1 1
## 13506 66 113 1 1
## 13507 67 113 1 1
## 13508 68 113 1 1
## 13509 69 113 1 1
## 13510 70 113 1 1
## 13511 71 113 1 1
## 13512 72 113 1 1
## 13513 73 113 1 1
## 13514 74 113 1 1
## 13515 75 113 1 1
## 13516 76 113 1 1
## 13517 77 113 1 1
## 13518 78 113 1 1
## 13519 79 113 1 1
## 13520 80 113 1 1
## 13521 81 113 1 1
## 13522 82 113 1 1
## 13523 83 113 1 1
## 13524 84 113 1 1
## 13525 85 113 1 1
## 13526 86 113 1 1
## 13527 87 113 1 1
## 13528 88 113 1 1
## 13529 89 113 1 1
## 13530 90 113 1 1
## 13531 91 113 1 1
## 13532 92 113 1 1
## 13533 93 113 1 1
## 13534 94 113 1 1
## 13535 95 113 1 1
## 13536 96 113 1 1
## 13537 97 113 1 1
## 13538 98 113 1 1
## 13539 99 113 1 1
## 13540 100 113 1 1
## 13541 101 113 1 1
## 13542 102 113 1 1
## 13543 103 113 1 1
## 13544 104 113 1 1
## 13545 105 113 1 1
## 13546 106 113 1 1
## 13547 107 113 1 1
## 13548 108 113 1 1
## 13549 109 113 1 1
## 13550 110 113 1 1
## 13551 111 113 1 1
## 13552 112 113 1 1
## 13553 113 113 1 1
## 13554 114 113 1 1
## 13555 115 113 1 1
## 13556 116 113 1 1
## 13557 117 113 1 1
## 13558 118 113 1 1
## 13559 119 113 1 1
## 13560 120 113 1 1
## 13561 1 114 1 1
## 13562 2 114 1 1
## 13563 3 114 1 1
## 13564 4 114 1 1
## 13565 5 114 1 1
## 13566 6 114 1 1
## 13567 7 114 1 1
## 13568 8 114 1 1
## 13569 9 114 1 1
## 13570 10 114 1 1
## 13571 11 114 1 1
## 13572 12 114 1 1
## 13573 13 114 1 1
## 13574 14 114 1 1
## 13575 15 114 1 1
## 13576 16 114 1 1
## 13577 17 114 1 1
## 13578 18 114 1 1
## 13579 19 114 1 1
## 13580 20 114 1 1
## 13581 21 114 1 1
## 13582 22 114 1 1
## 13583 23 114 1 1
## 13584 24 114 1 1
## 13585 25 114 1 1
## 13586 26 114 1 1
## 13587 27 114 1 1
## 13588 28 114 1 1
## 13589 29 114 1 1
## 13590 30 114 1 1
## 13591 31 114 1 1
## 13592 32 114 1 1
## 13593 33 114 1 1
## 13594 34 114 1 1
## 13595 35 114 1 1
## 13596 36 114 1 1
## 13597 37 114 1 1
## 13598 38 114 1 1
## 13599 39 114 1 1
## 13600 40 114 1 1
## 13601 41 114 1 1
## 13602 42 114 1 1
## 13603 43 114 1 1
## 13604 44 114 1 1
## 13605 45 114 1 1
## 13606 46 114 1 1
## 13607 47 114 1 1
## 13608 48 114 1 1
## 13609 49 114 1 1
## 13610 50 114 1 1
## 13611 51 114 1 1
## 13612 52 114 1 1
## 13613 53 114 1 1
## 13614 54 114 1 1
## 13615 55 114 1 1
## 13616 56 114 1 1
## 13617 57 114 1 1
## 13618 58 114 1 1
## 13619 59 114 1 1
## 13620 60 114 1 1
## 13621 61 114 1 1
## 13622 62 114 1 1
## 13623 63 114 1 1
## 13624 64 114 1 1
## 13625 65 114 1 1
## 13626 66 114 1 1
## 13627 67 114 1 1
## 13628 68 114 1 1
## 13629 69 114 1 1
## 13630 70 114 1 1
## 13631 71 114 1 1
## 13632 72 114 1 1
## 13633 73 114 1 1
## 13634 74 114 1 1
## 13635 75 114 1 1
## 13636 76 114 1 1
## 13637 77 114 1 1
## 13638 78 114 1 1
## 13639 79 114 1 1
## 13640 80 114 1 1
## 13641 81 114 1 1
## 13642 82 114 1 1
## 13643 83 114 1 1
## 13644 84 114 1 1
## 13645 85 114 1 1
## 13646 86 114 1 1
## 13647 87 114 1 1
## 13648 88 114 1 1
## 13649 89 114 1 1
## 13650 90 114 1 1
## 13651 91 114 1 1
## 13652 92 114 1 1
## 13653 93 114 1 1
## 13654 94 114 1 1
## 13655 95 114 1 1
## 13656 96 114 1 1
## 13657 97 114 1 1
## 13658 98 114 1 1
## 13659 99 114 1 1
## 13660 100 114 1 1
## 13661 101 114 1 1
## 13662 102 114 1 1
## 13663 103 114 1 1
## 13664 104 114 1 1
## 13665 105 114 1 1
## 13666 106 114 1 1
## 13667 107 114 1 1
## 13668 108 114 1 1
## 13669 109 114 1 1
## 13670 110 114 1 1
## 13671 111 114 1 1
## 13672 112 114 1 1
## 13673 113 114 1 1
## 13674 114 114 1 1
## 13675 115 114 1 1
## 13676 116 114 1 1
## 13677 117 114 1 1
## 13678 118 114 1 1
## 13679 119 114 1 1
## 13680 120 114 1 1
## 13681 1 115 1 1
## 13682 2 115 1 1
## 13683 3 115 1 1
## 13684 4 115 1 1
## 13685 5 115 1 1
## 13686 6 115 1 1
## 13687 7 115 1 1
## 13688 8 115 1 1
## 13689 9 115 1 1
## 13690 10 115 1 1
## 13691 11 115 1 1
## 13692 12 115 1 1
## 13693 13 115 1 1
## 13694 14 115 1 1
## 13695 15 115 1 1
## 13696 16 115 1 1
## 13697 17 115 1 1
## 13698 18 115 1 1
## 13699 19 115 1 1
## 13700 20 115 1 1
## 13701 21 115 1 1
## 13702 22 115 1 1
## 13703 23 115 1 1
## 13704 24 115 1 1
## 13705 25 115 1 1
## 13706 26 115 1 1
## 13707 27 115 1 1
## 13708 28 115 1 1
## 13709 29 115 1 1
## 13710 30 115 1 1
## 13711 31 115 1 1
## 13712 32 115 1 1
## 13713 33 115 1 1
## 13714 34 115 1 1
## 13715 35 115 1 1
## 13716 36 115 1 1
## 13717 37 115 1 1
## 13718 38 115 1 1
## 13719 39 115 1 1
## 13720 40 115 1 1
## 13721 41 115 1 1
## 13722 42 115 1 1
## 13723 43 115 1 1
## 13724 44 115 1 1
## 13725 45 115 1 1
## 13726 46 115 1 1
## 13727 47 115 1 1
## 13728 48 115 1 1
## 13729 49 115 1 1
## 13730 50 115 1 1
## 13731 51 115 1 1
## 13732 52 115 1 1
## 13733 53 115 1 1
## 13734 54 115 1 1
## 13735 55 115 1 1
## 13736 56 115 1 1
## 13737 57 115 1 1
## 13738 58 115 1 1
## 13739 59 115 1 1
## 13740 60 115 1 1
## 13741 61 115 1 1
## 13742 62 115 1 1
## 13743 63 115 1 1
## 13744 64 115 1 1
## 13745 65 115 1 1
## 13746 66 115 1 1
## 13747 67 115 1 1
## 13748 68 115 1 1
## 13749 69 115 1 1
## 13750 70 115 1 1
## 13751 71 115 1 1
## 13752 72 115 1 1
## 13753 73 115 1 1
## 13754 74 115 1 1
## 13755 75 115 1 1
## 13756 76 115 1 1
## 13757 77 115 1 1
## 13758 78 115 1 1
## 13759 79 115 1 1
## 13760 80 115 1 1
## 13761 81 115 1 1
## 13762 82 115 1 1
## 13763 83 115 1 1
## 13764 84 115 1 1
## 13765 85 115 1 1
## 13766 86 115 1 1
## 13767 87 115 1 1
## 13768 88 115 1 1
## 13769 89 115 1 1
## 13770 90 115 1 1
## 13771 91 115 1 1
## 13772 92 115 1 1
## 13773 93 115 1 1
## 13774 94 115 1 1
## 13775 95 115 1 1
## 13776 96 115 1 1
## 13777 97 115 1 1
## 13778 98 115 1 1
## 13779 99 115 1 1
## 13780 100 115 1 1
## 13781 101 115 1 1
## 13782 102 115 1 1
## 13783 103 115 1 1
## 13784 104 115 1 1
## 13785 105 115 1 1
## 13786 106 115 1 1
## 13787 107 115 1 1
## 13788 108 115 1 1
## 13789 109 115 1 1
## 13790 110 115 1 1
## 13791 111 115 1 1
## 13792 112 115 1 1
## 13793 113 115 1 1
## 13794 114 115 1 1
## 13795 115 115 1 1
## 13796 116 115 1 1
## 13797 117 115 1 1
## 13798 118 115 1 1
## 13799 119 115 1 1
## 13800 120 115 1 1
## 13801 1 116 1 1
## 13802 2 116 1 1
## 13803 3 116 1 1
## 13804 4 116 1 1
## 13805 5 116 1 1
## 13806 6 116 1 1
## 13807 7 116 1 1
## 13808 8 116 1 1
## 13809 9 116 1 1
## 13810 10 116 1 1
## 13811 11 116 1 1
## 13812 12 116 1 1
## 13813 13 116 1 1
## 13814 14 116 1 1
## 13815 15 116 1 1
## 13816 16 116 1 1
## 13817 17 116 1 1
## 13818 18 116 1 1
## 13819 19 116 1 1
## 13820 20 116 1 1
## 13821 21 116 1 1
## 13822 22 116 1 1
## 13823 23 116 1 1
## 13824 24 116 1 1
## 13825 25 116 1 1
## 13826 26 116 1 1
## 13827 27 116 1 1
## 13828 28 116 1 1
## 13829 29 116 1 1
## 13830 30 116 1 1
## 13831 31 116 1 1
## 13832 32 116 1 1
## 13833 33 116 1 1
## 13834 34 116 1 1
## 13835 35 116 1 1
## 13836 36 116 1 1
## 13837 37 116 1 1
## 13838 38 116 1 1
## 13839 39 116 1 1
## 13840 40 116 1 1
## 13841 41 116 1 1
## 13842 42 116 1 1
## 13843 43 116 1 1
## 13844 44 116 1 1
## 13845 45 116 1 1
## 13846 46 116 1 1
## 13847 47 116 1 1
## 13848 48 116 1 1
## 13849 49 116 1 1
## 13850 50 116 1 1
## 13851 51 116 1 1
## 13852 52 116 1 1
## 13853 53 116 1 1
## 13854 54 116 1 1
## 13855 55 116 1 1
## 13856 56 116 1 1
## 13857 57 116 1 1
## 13858 58 116 1 1
## 13859 59 116 1 1
## 13860 60 116 1 1
## 13861 61 116 1 1
## 13862 62 116 1 1
## 13863 63 116 1 1
## 13864 64 116 1 1
## 13865 65 116 1 1
## 13866 66 116 1 1
## 13867 67 116 1 1
## 13868 68 116 1 1
## 13869 69 116 1 1
## 13870 70 116 1 1
## 13871 71 116 1 1
## 13872 72 116 1 1
## 13873 73 116 1 1
## 13874 74 116 1 1
## 13875 75 116 1 1
## 13876 76 116 1 1
## 13877 77 116 1 1
## 13878 78 116 1 1
## 13879 79 116 1 1
## 13880 80 116 1 1
## 13881 81 116 1 1
## 13882 82 116 1 1
## 13883 83 116 1 1
## 13884 84 116 1 1
## 13885 85 116 1 1
## 13886 86 116 1 1
## 13887 87 116 1 1
## 13888 88 116 1 1
## 13889 89 116 1 1
## 13890 90 116 1 1
## 13891 91 116 1 1
## 13892 92 116 1 1
## 13893 93 116 1 1
## 13894 94 116 1 1
## 13895 95 116 1 1
## 13896 96 116 1 1
## 13897 97 116 1 1
## 13898 98 116 1 1
## 13899 99 116 1 1
## 13900 100 116 1 1
## 13901 101 116 1 1
## 13902 102 116 1 1
## 13903 103 116 1 1
## 13904 104 116 1 1
## 13905 105 116 1 1
## 13906 106 116 1 1
## 13907 107 116 1 1
## 13908 108 116 1 1
## 13909 109 116 1 1
## 13910 110 116 1 1
## 13911 111 116 1 1
## 13912 112 116 1 1
## 13913 113 116 1 1
## 13914 114 116 1 1
## 13915 115 116 1 1
## 13916 116 116 1 1
## 13917 117 116 1 1
## 13918 118 116 1 1
## 13919 119 116 1 1
## 13920 120 116 1 1
## 13921 1 117 1 1
## 13922 2 117 1 1
## 13923 3 117 1 1
## 13924 4 117 1 1
## 13925 5 117 1 1
## 13926 6 117 1 1
## 13927 7 117 1 1
## 13928 8 117 1 1
## 13929 9 117 1 1
## 13930 10 117 1 1
## 13931 11 117 1 1
## 13932 12 117 1 1
## 13933 13 117 1 1
## 13934 14 117 1 1
## 13935 15 117 1 1
## 13936 16 117 1 1
## 13937 17 117 1 1
## 13938 18 117 1 1
## 13939 19 117 1 1
## 13940 20 117 1 1
## 13941 21 117 1 1
## 13942 22 117 1 1
## 13943 23 117 1 1
## 13944 24 117 1 1
## 13945 25 117 1 1
## 13946 26 117 1 1
## 13947 27 117 1 1
## 13948 28 117 1 1
## 13949 29 117 1 1
## 13950 30 117 1 1
## 13951 31 117 1 1
## 13952 32 117 1 1
## 13953 33 117 1 1
## 13954 34 117 1 1
## 13955 35 117 1 1
## 13956 36 117 1 1
## 13957 37 117 1 1
## 13958 38 117 1 1
## 13959 39 117 1 1
## 13960 40 117 1 1
## 13961 41 117 1 1
## 13962 42 117 1 1
## 13963 43 117 1 1
## 13964 44 117 1 1
## 13965 45 117 1 1
## 13966 46 117 1 1
## 13967 47 117 1 1
## 13968 48 117 1 1
## 13969 49 117 1 1
## 13970 50 117 1 1
## 13971 51 117 1 1
## 13972 52 117 1 1
## 13973 53 117 1 1
## 13974 54 117 1 1
## 13975 55 117 1 1
## 13976 56 117 1 1
## 13977 57 117 1 1
## 13978 58 117 1 1
## 13979 59 117 1 1
## 13980 60 117 1 1
## 13981 61 117 1 1
## 13982 62 117 1 1
## 13983 63 117 1 1
## 13984 64 117 1 1
## 13985 65 117 1 1
## 13986 66 117 1 1
## 13987 67 117 1 1
## 13988 68 117 1 1
## 13989 69 117 1 1
## 13990 70 117 1 1
## 13991 71 117 1 1
## 13992 72 117 1 1
## 13993 73 117 1 1
## 13994 74 117 1 1
## 13995 75 117 1 1
## 13996 76 117 1 1
## 13997 77 117 1 1
## 13998 78 117 1 1
## 13999 79 117 1 1
## 14000 80 117 1 1
## 14001 81 117 1 1
## 14002 82 117 1 1
## 14003 83 117 1 1
## 14004 84 117 1 1
## 14005 85 117 1 1
## 14006 86 117 1 1
## 14007 87 117 1 1
## 14008 88 117 1 1
## 14009 89 117 1 1
## 14010 90 117 1 1
## 14011 91 117 1 1
## 14012 92 117 1 1
## 14013 93 117 1 1
## 14014 94 117 1 1
## 14015 95 117 1 1
## 14016 96 117 1 1
## 14017 97 117 1 1
## 14018 98 117 1 1
## 14019 99 117 1 1
## 14020 100 117 1 1
## 14021 101 117 1 1
## 14022 102 117 1 1
## 14023 103 117 1 1
## 14024 104 117 1 1
## 14025 105 117 1 1
## 14026 106 117 1 1
## 14027 107 117 1 1
## 14028 108 117 1 1
## 14029 109 117 1 1
## 14030 110 117 1 1
## 14031 111 117 1 1
## 14032 112 117 1 1
## 14033 113 117 1 1
## 14034 114 117 1 1
## 14035 115 117 1 1
## 14036 116 117 1 1
## 14037 117 117 1 1
## 14038 118 117 1 1
## 14039 119 117 1 1
## 14040 120 117 1 1
## 14041 1 118 1 1
## 14042 2 118 1 1
## 14043 3 118 1 1
## 14044 4 118 1 1
## 14045 5 118 1 1
## 14046 6 118 1 1
## 14047 7 118 1 1
## 14048 8 118 1 1
## 14049 9 118 1 1
## 14050 10 118 1 1
## 14051 11 118 1 1
## 14052 12 118 1 1
## 14053 13 118 1 1
## 14054 14 118 1 1
## 14055 15 118 1 1
## 14056 16 118 1 1
## 14057 17 118 1 1
## 14058 18 118 1 1
## 14059 19 118 1 1
## 14060 20 118 1 1
## 14061 21 118 1 1
## 14062 22 118 1 1
## 14063 23 118 1 1
## 14064 24 118 1 1
## 14065 25 118 1 1
## 14066 26 118 1 1
## 14067 27 118 1 1
## 14068 28 118 1 1
## 14069 29 118 1 1
## 14070 30 118 1 1
## 14071 31 118 1 1
## 14072 32 118 1 1
## 14073 33 118 1 1
## 14074 34 118 1 1
## 14075 35 118 1 1
## 14076 36 118 1 1
## 14077 37 118 1 1
## 14078 38 118 1 1
## 14079 39 118 1 1
## 14080 40 118 1 1
## 14081 41 118 1 1
## 14082 42 118 1 1
## 14083 43 118 1 1
## 14084 44 118 1 1
## 14085 45 118 1 1
## 14086 46 118 1 1
## 14087 47 118 1 1
## 14088 48 118 1 1
## 14089 49 118 1 1
## 14090 50 118 1 1
## 14091 51 118 1 1
## 14092 52 118 1 1
## 14093 53 118 1 1
## 14094 54 118 1 1
## 14095 55 118 1 1
## 14096 56 118 1 1
## 14097 57 118 1 1
## 14098 58 118 1 1
## 14099 59 118 1 1
## 14100 60 118 1 1
## 14101 61 118 1 1
## 14102 62 118 1 1
## 14103 63 118 1 1
## 14104 64 118 1 1
## 14105 65 118 1 1
## 14106 66 118 1 1
## 14107 67 118 1 1
## 14108 68 118 1 1
## 14109 69 118 1 1
## 14110 70 118 1 1
## 14111 71 118 1 1
## 14112 72 118 1 1
## 14113 73 118 1 1
## 14114 74 118 1 1
## 14115 75 118 1 1
## 14116 76 118 1 1
## 14117 77 118 1 1
## 14118 78 118 1 1
## 14119 79 118 1 1
## 14120 80 118 1 1
## 14121 81 118 1 1
## 14122 82 118 1 1
## 14123 83 118 1 1
## 14124 84 118 1 1
## 14125 85 118 1 1
## 14126 86 118 1 1
## 14127 87 118 1 1
## 14128 88 118 1 1
## 14129 89 118 1 1
## 14130 90 118 1 1
## 14131 91 118 1 1
## 14132 92 118 1 1
## 14133 93 118 1 1
## 14134 94 118 1 1
## 14135 95 118 1 1
## 14136 96 118 1 1
## 14137 97 118 1 1
## 14138 98 118 1 1
## 14139 99 118 1 1
## 14140 100 118 1 1
## 14141 101 118 1 1
## 14142 102 118 1 1
## 14143 103 118 1 1
## 14144 104 118 1 1
## 14145 105 118 1 1
## 14146 106 118 1 1
## 14147 107 118 1 1
## 14148 108 118 1 1
## 14149 109 118 1 1
## 14150 110 118 1 1
## 14151 111 118 1 1
## 14152 112 118 1 1
## 14153 113 118 1 1
## 14154 114 118 1 1
## 14155 115 118 1 1
## 14156 116 118 1 1
## 14157 117 118 1 1
## 14158 118 118 1 1
## 14159 119 118 1 1
## 14160 120 118 1 1
## 14161 1 119 1 1
## 14162 2 119 1 1
## 14163 3 119 1 1
## 14164 4 119 1 1
## 14165 5 119 1 1
## 14166 6 119 1 1
## 14167 7 119 1 1
## 14168 8 119 1 1
## 14169 9 119 1 1
## 14170 10 119 1 1
## 14171 11 119 1 1
## 14172 12 119 1 1
## 14173 13 119 1 1
## 14174 14 119 1 1
## 14175 15 119 1 1
## 14176 16 119 1 1
## 14177 17 119 1 1
## 14178 18 119 1 1
## 14179 19 119 1 1
## 14180 20 119 1 1
## 14181 21 119 1 1
## 14182 22 119 1 1
## 14183 23 119 1 1
## 14184 24 119 1 1
## 14185 25 119 1 1
## 14186 26 119 1 1
## 14187 27 119 1 1
## 14188 28 119 1 1
## 14189 29 119 1 1
## 14190 30 119 1 1
## 14191 31 119 1 1
## 14192 32 119 1 1
## 14193 33 119 1 1
## 14194 34 119 1 1
## 14195 35 119 1 1
## 14196 36 119 1 1
## 14197 37 119 1 1
## 14198 38 119 1 1
## 14199 39 119 1 1
## 14200 40 119 1 1
## 14201 41 119 1 1
## 14202 42 119 1 1
## 14203 43 119 1 1
## 14204 44 119 1 1
## 14205 45 119 1 1
## 14206 46 119 1 1
## 14207 47 119 1 1
## 14208 48 119 1 1
## 14209 49 119 1 1
## 14210 50 119 1 1
## 14211 51 119 1 1
## 14212 52 119 1 1
## 14213 53 119 1 1
## 14214 54 119 1 1
## 14215 55 119 1 1
## 14216 56 119 1 1
## 14217 57 119 1 1
## 14218 58 119 1 1
## 14219 59 119 1 1
## 14220 60 119 1 1
## 14221 61 119 1 1
## 14222 62 119 1 1
## 14223 63 119 1 1
## 14224 64 119 1 1
## 14225 65 119 1 1
## 14226 66 119 1 1
## 14227 67 119 1 1
## 14228 68 119 1 1
## 14229 69 119 1 1
## 14230 70 119 1 1
## 14231 71 119 1 1
## 14232 72 119 1 1
## 14233 73 119 1 1
## 14234 74 119 1 1
## 14235 75 119 1 1
## 14236 76 119 1 1
## 14237 77 119 1 1
## 14238 78 119 1 1
## 14239 79 119 1 1
## 14240 80 119 1 1
## 14241 81 119 1 1
## 14242 82 119 1 1
## 14243 83 119 1 1
## 14244 84 119 1 1
## 14245 85 119 1 1
## 14246 86 119 1 1
## 14247 87 119 1 1
## 14248 88 119 1 1
## 14249 89 119 1 1
## 14250 90 119 1 1
## 14251 91 119 1 1
## 14252 92 119 1 1
## 14253 93 119 1 1
## 14254 94 119 1 1
## 14255 95 119 1 1
## 14256 96 119 1 1
## 14257 97 119 1 1
## 14258 98 119 1 1
## 14259 99 119 1 1
## 14260 100 119 1 1
## 14261 101 119 1 1
## 14262 102 119 1 1
## 14263 103 119 1 1
## 14264 104 119 1 1
## 14265 105 119 1 1
## 14266 106 119 1 1
## 14267 107 119 1 1
## 14268 108 119 1 1
## 14269 109 119 1 1
## 14270 110 119 1 1
## 14271 111 119 1 1
## 14272 112 119 1 1
## 14273 113 119 1 1
## 14274 114 119 1 1
## 14275 115 119 1 1
## 14276 116 119 1 1
## 14277 117 119 1 1
## 14278 118 119 1 1
## 14279 119 119 1 1
## 14280 120 119 1 1
## 14281 1 120 1 1
## 14282 2 120 1 1
## 14283 3 120 1 1
## 14284 4 120 1 1
## 14285 5 120 1 1
## 14286 6 120 1 1
## 14287 7 120 1 1
## 14288 8 120 1 1
## 14289 9 120 1 1
## 14290 10 120 1 1
## 14291 11 120 1 1
## 14292 12 120 1 1
## 14293 13 120 1 1
## 14294 14 120 1 1
## 14295 15 120 1 1
## 14296 16 120 1 1
## 14297 17 120 1 1
## 14298 18 120 1 1
## 14299 19 120 1 1
## 14300 20 120 1 1
## 14301 21 120 1 1
## 14302 22 120 1 1
## 14303 23 120 1 1
## 14304 24 120 1 1
## 14305 25 120 1 1
## 14306 26 120 1 1
## 14307 27 120 1 1
## 14308 28 120 1 1
## 14309 29 120 1 1
## 14310 30 120 1 1
## 14311 31 120 1 1
## 14312 32 120 1 1
## 14313 33 120 1 1
## 14314 34 120 1 1
## 14315 35 120 1 1
## 14316 36 120 1 1
## 14317 37 120 1 1
## 14318 38 120 1 1
## 14319 39 120 1 1
## 14320 40 120 1 1
## 14321 41 120 1 1
## 14322 42 120 1 1
## 14323 43 120 1 1
## 14324 44 120 1 1
## 14325 45 120 1 1
## 14326 46 120 1 1
## 14327 47 120 1 1
## 14328 48 120 1 1
## 14329 49 120 1 1
## 14330 50 120 1 1
## 14331 51 120 1 1
## 14332 52 120 1 1
## 14333 53 120 1 1
## 14334 54 120 1 1
## 14335 55 120 1 1
## 14336 56 120 1 1
## 14337 57 120 1 1
## 14338 58 120 1 1
## 14339 59 120 1 1
## 14340 60 120 1 1
## 14341 61 120 1 1
## 14342 62 120 1 1
## 14343 63 120 1 1
## 14344 64 120 1 1
## 14345 65 120 1 1
## 14346 66 120 1 1
## 14347 67 120 1 1
## 14348 68 120 1 1
## 14349 69 120 1 1
## 14350 70 120 1 1
## 14351 71 120 1 1
## 14352 72 120 1 1
## 14353 73 120 1 1
## 14354 74 120 1 1
## 14355 75 120 1 1
## 14356 76 120 1 1
## 14357 77 120 1 1
## 14358 78 120 1 1
## 14359 79 120 1 1
## 14360 80 120 1 1
## 14361 81 120 1 1
## 14362 82 120 1 1
## 14363 83 120 1 1
## 14364 84 120 1 1
## 14365 85 120 1 1
## 14366 86 120 1 1
## 14367 87 120 1 1
## 14368 88 120 1 1
## 14369 89 120 1 1
## 14370 90 120 1 1
## 14371 91 120 1 1
## 14372 92 120 1 1
## 14373 93 120 1 1
## 14374 94 120 1 1
## 14375 95 120 1 1
## 14376 96 120 1 1
## 14377 97 120 1 1
## 14378 98 120 1 1
## 14379 99 120 1 1
## 14380 100 120 1 1
## 14381 101 120 1 1
## 14382 102 120 1 1
## 14383 103 120 1 1
## 14384 104 120 1 1
## 14385 105 120 1 1
## 14386 106 120 1 1
## 14387 107 120 1 1
## 14388 108 120 1 1
## 14389 109 120 1 1
## 14390 110 120 1 1
## 14391 111 120 1 1
## 14392 112 120 1 1
## 14393 113 120 1 1
## 14394 114 120 1 1
## 14395 115 120 1 1
## 14396 116 120 1 1
## 14397 117 120 1 1
## 14398 118 120 1 1
## 14399 119 120 1 1
## 14400 120 120 1 1
## 14401 1 1 2 1
## 14402 2 1 2 1
## 14403 3 1 2 1
## 14404 4 1 2 1
## 14405 5 1 2 1
## 14406 6 1 2 1
## 14407 7 1 2 1
## 14408 8 1 2 1
## 14409 9 1 2 1
## 14410 10 1 2 1
## 14411 11 1 2 1
## 14412 12 1 2 1
## 14413 13 1 2 1
## 14414 14 1 2 1
## 14415 15 1 2 1
## 14416 16 1 2 1
## 14417 17 1 2 1
## 14418 18 1 2 1
## 14419 19 1 2 1
## 14420 20 1 2 1
## 14421 21 1 2 1
## 14422 22 1 2 1
## 14423 23 1 2 1
## 14424 24 1 2 1
## 14425 25 1 2 1
## 14426 26 1 2 1
## 14427 27 1 2 1
## 14428 28 1 2 1
## 14429 29 1 2 1
## 14430 30 1 2 1
## 14431 31 1 2 1
## 14432 32 1 2 1
## 14433 33 1 2 1
## 14434 34 1 2 1
## 14435 35 1 2 1
## 14436 36 1 2 1
## 14437 37 1 2 1
## 14438 38 1 2 1
## 14439 39 1 2 1
## 14440 40 1 2 1
## 14441 41 1 2 1
## 14442 42 1 2 1
## 14443 43 1 2 1
## 14444 44 1 2 1
## 14445 45 1 2 1
## 14446 46 1 2 1
## 14447 47 1 2 1
## 14448 48 1 2 1
## 14449 49 1 2 1
## 14450 50 1 2 1
## 14451 51 1 2 1
## 14452 52 1 2 1
## 14453 53 1 2 1
## 14454 54 1 2 1
## 14455 55 1 2 1
## 14456 56 1 2 1
## 14457 57 1 2 1
## 14458 58 1 2 1
## 14459 59 1 2 1
## 14460 60 1 2 1
## 14461 61 1 2 1
## 14462 62 1 2 1
## 14463 63 1 2 1
## 14464 64 1 2 1
## 14465 65 1 2 1
## 14466 66 1 2 1
## 14467 67 1 2 1
## 14468 68 1 2 1
## 14469 69 1 2 1
## 14470 70 1 2 1
## 14471 71 1 2 1
## 14472 72 1 2 1
## 14473 73 1 2 1
## 14474 74 1 2 1
## 14475 75 1 2 1
## 14476 76 1 2 1
## 14477 77 1 2 1
## 14478 78 1 2 1
## 14479 79 1 2 1
## 14480 80 1 2 1
## 14481 81 1 2 1
## 14482 82 1 2 1
## 14483 83 1 2 1
## 14484 84 1 2 1
## 14485 85 1 2 1
## 14486 86 1 2 1
## 14487 87 1 2 1
## 14488 88 1 2 1
## 14489 89 1 2 1
## 14490 90 1 2 1
## 14491 91 1 2 1
## 14492 92 1 2 1
## 14493 93 1 2 1
## 14494 94 1 2 1
## 14495 95 1 2 1
## 14496 96 1 2 1
## 14497 97 1 2 1
## 14498 98 1 2 1
## 14499 99 1 2 1
## 14500 100 1 2 1
## 14501 101 1 2 1
## 14502 102 1 2 1
## 14503 103 1 2 1
## 14504 104 1 2 1
## 14505 105 1 2 1
## 14506 106 1 2 1
## 14507 107 1 2 1
## 14508 108 1 2 1
## 14509 109 1 2 1
## 14510 110 1 2 1
## 14511 111 1 2 1
## 14512 112 1 2 1
## 14513 113 1 2 1
## 14514 114 1 2 1
## 14515 115 1 2 1
## 14516 116 1 2 1
## 14517 117 1 2 1
## 14518 118 1 2 1
## 14519 119 1 2 1
## 14520 120 1 2 1
## 14521 1 2 2 1
## 14522 2 2 2 1
## 14523 3 2 2 1
## 14524 4 2 2 1
## 14525 5 2 2 1
## 14526 6 2 2 1
## 14527 7 2 2 1
## 14528 8 2 2 1
## 14529 9 2 2 1
## 14530 10 2 2 1
## 14531 11 2 2 1
## 14532 12 2 2 1
## 14533 13 2 2 1
## 14534 14 2 2 1
## 14535 15 2 2 1
## 14536 16 2 2 1
## 14537 17 2 2 1
## 14538 18 2 2 1
## 14539 19 2 2 1
## 14540 20 2 2 1
## 14541 21 2 2 1
## 14542 22 2 2 1
## 14543 23 2 2 1
## 14544 24 2 2 1
## 14545 25 2 2 1
## 14546 26 2 2 1
## 14547 27 2 2 1
## 14548 28 2 2 1
## 14549 29 2 2 1
## 14550 30 2 2 1
## 14551 31 2 2 1
## 14552 32 2 2 1
## 14553 33 2 2 1
## 14554 34 2 2 1
## 14555 35 2 2 1
## 14556 36 2 2 1
## 14557 37 2 2 1
## 14558 38 2 2 1
## 14559 39 2 2 1
## 14560 40 2 2 1
## 14561 41 2 2 1
## 14562 42 2 2 1
## 14563 43 2 2 1
## 14564 44 2 2 1
## 14565 45 2 2 1
## 14566 46 2 2 1
## 14567 47 2 2 1
## 14568 48 2 2 1
## 14569 49 2 2 1
## 14570 50 2 2 1
## 14571 51 2 2 1
## 14572 52 2 2 1
## 14573 53 2 2 1
## 14574 54 2 2 1
## 14575 55 2 2 1
## 14576 56 2 2 1
## 14577 57 2 2 1
## 14578 58 2 2 1
## 14579 59 2 2 1
## 14580 60 2 2 1
## 14581 61 2 2 1
## 14582 62 2 2 1
## 14583 63 2 2 1
## 14584 64 2 2 1
## 14585 65 2 2 1
## 14586 66 2 2 1
## 14587 67 2 2 1
## 14588 68 2 2 1
## 14589 69 2 2 1
## 14590 70 2 2 1
## 14591 71 2 2 1
## 14592 72 2 2 1
## 14593 73 2 2 1
## 14594 74 2 2 1
## 14595 75 2 2 1
## 14596 76 2 2 1
## 14597 77 2 2 1
## 14598 78 2 2 1
## 14599 79 2 2 1
## 14600 80 2 2 1
## 14601 81 2 2 1
## 14602 82 2 2 1
## 14603 83 2 2 1
## 14604 84 2 2 1
## 14605 85 2 2 1
## 14606 86 2 2 1
## 14607 87 2 2 1
## 14608 88 2 2 1
## 14609 89 2 2 1
## 14610 90 2 2 1
## 14611 91 2 2 1
## 14612 92 2 2 1
## 14613 93 2 2 1
## 14614 94 2 2 1
## 14615 95 2 2 1
## 14616 96 2 2 1
## 14617 97 2 2 1
## 14618 98 2 2 1
## 14619 99 2 2 1
## 14620 100 2 2 1
## 14621 101 2 2 1
## 14622 102 2 2 1
## 14623 103 2 2 1
## 14624 104 2 2 1
## 14625 105 2 2 1
## 14626 106 2 2 1
## 14627 107 2 2 1
## 14628 108 2 2 1
## 14629 109 2 2 1
## 14630 110 2 2 1
## 14631 111 2 2 1
## 14632 112 2 2 1
## 14633 113 2 2 1
## 14634 114 2 2 1
## 14635 115 2 2 1
## 14636 116 2 2 1
## 14637 117 2 2 1
## 14638 118 2 2 1
## 14639 119 2 2 1
## 14640 120 2 2 1
## 14641 1 3 2 1
## 14642 2 3 2 1
## 14643 3 3 2 1
## 14644 4 3 2 1
## 14645 5 3 2 1
## 14646 6 3 2 1
## 14647 7 3 2 1
## 14648 8 3 2 1
## 14649 9 3 2 1
## 14650 10 3 2 1
## 14651 11 3 2 1
## 14652 12 3 2 1
## 14653 13 3 2 1
## 14654 14 3 2 1
## 14655 15 3 2 1
## 14656 16 3 2 1
## 14657 17 3 2 1
## 14658 18 3 2 1
## 14659 19 3 2 1
## 14660 20 3 2 1
## 14661 21 3 2 1
## 14662 22 3 2 1
## 14663 23 3 2 1
## 14664 24 3 2 1
## 14665 25 3 2 1
## 14666 26 3 2 1
## 14667 27 3 2 1
## 14668 28 3 2 1
## 14669 29 3 2 1
## 14670 30 3 2 1
## 14671 31 3 2 1
## 14672 32 3 2 1
## 14673 33 3 2 1
## 14674 34 3 2 1
## 14675 35 3 2 1
## 14676 36 3 2 1
## 14677 37 3 2 1
## 14678 38 3 2 1
## 14679 39 3 2 1
## 14680 40 3 2 1
## 14681 41 3 2 1
## 14682 42 3 2 1
## 14683 43 3 2 1
## 14684 44 3 2 1
## 14685 45 3 2 1
## 14686 46 3 2 1
## 14687 47 3 2 1
## 14688 48 3 2 1
## 14689 49 3 2 1
## 14690 50 3 2 1
## 14691 51 3 2 1
## 14692 52 3 2 1
## 14693 53 3 2 1
## 14694 54 3 2 1
## 14695 55 3 2 1
## 14696 56 3 2 1
## 14697 57 3 2 1
## 14698 58 3 2 1
## 14699 59 3 2 1
## 14700 60 3 2 1
## 14701 61 3 2 1
## 14702 62 3 2 1
## 14703 63 3 2 1
## 14704 64 3 2 1
## 14705 65 3 2 1
## 14706 66 3 2 1
## 14707 67 3 2 1
## 14708 68 3 2 1
## 14709 69 3 2 1
## 14710 70 3 2 1
## 14711 71 3 2 1
## 14712 72 3 2 1
## 14713 73 3 2 1
## 14714 74 3 2 1
## 14715 75 3 2 1
## 14716 76 3 2 1
## 14717 77 3 2 1
## 14718 78 3 2 1
## 14719 79 3 2 1
## 14720 80 3 2 1
## 14721 81 3 2 1
## 14722 82 3 2 1
## 14723 83 3 2 1
## 14724 84 3 2 1
## 14725 85 3 2 1
## 14726 86 3 2 1
## 14727 87 3 2 1
## 14728 88 3 2 1
## 14729 89 3 2 1
## 14730 90 3 2 1
## 14731 91 3 2 1
## 14732 92 3 2 1
## 14733 93 3 2 1
## 14734 94 3 2 1
## 14735 95 3 2 1
## 14736 96 3 2 1
## 14737 97 3 2 1
## 14738 98 3 2 1
## 14739 99 3 2 1
## 14740 100 3 2 1
## 14741 101 3 2 1
## 14742 102 3 2 1
## 14743 103 3 2 1
## 14744 104 3 2 1
## 14745 105 3 2 1
## 14746 106 3 2 1
## 14747 107 3 2 1
## 14748 108 3 2 1
## 14749 109 3 2 1
## 14750 110 3 2 1
## 14751 111 3 2 1
## 14752 112 3 2 1
## 14753 113 3 2 1
## 14754 114 3 2 1
## 14755 115 3 2 1
## 14756 116 3 2 1
## 14757 117 3 2 1
## 14758 118 3 2 1
## 14759 119 3 2 1
## 14760 120 3 2 1
## 14761 1 4 2 1
## 14762 2 4 2 1
## 14763 3 4 2 1
## 14764 4 4 2 1
## 14765 5 4 2 1
## 14766 6 4 2 1
## 14767 7 4 2 1
## 14768 8 4 2 1
## 14769 9 4 2 1
## 14770 10 4 2 1
## 14771 11 4 2 1
## 14772 12 4 2 1
## 14773 13 4 2 1
## 14774 14 4 2 1
## 14775 15 4 2 1
## 14776 16 4 2 1
## 14777 17 4 2 1
## 14778 18 4 2 1
## 14779 19 4 2 1
## 14780 20 4 2 1
## 14781 21 4 2 1
## 14782 22 4 2 1
## 14783 23 4 2 1
## 14784 24 4 2 1
## 14785 25 4 2 1
## 14786 26 4 2 1
## 14787 27 4 2 1
## 14788 28 4 2 1
## 14789 29 4 2 1
## 14790 30 4 2 1
## 14791 31 4 2 1
## 14792 32 4 2 1
## 14793 33 4 2 1
## 14794 34 4 2 1
## 14795 35 4 2 1
## 14796 36 4 2 1
## 14797 37 4 2 1
## 14798 38 4 2 1
## 14799 39 4 2 1
## 14800 40 4 2 1
## 14801 41 4 2 1
## 14802 42 4 2 1
## 14803 43 4 2 1
## 14804 44 4 2 1
## 14805 45 4 2 1
## 14806 46 4 2 1
## 14807 47 4 2 1
## 14808 48 4 2 1
## 14809 49 4 2 1
## 14810 50 4 2 1
## 14811 51 4 2 1
## 14812 52 4 2 1
## 14813 53 4 2 1
## 14814 54 4 2 1
## 14815 55 4 2 1
## 14816 56 4 2 1
## 14817 57 4 2 1
## 14818 58 4 2 1
## 14819 59 4 2 1
## 14820 60 4 2 1
## 14821 61 4 2 1
## 14822 62 4 2 1
## 14823 63 4 2 1
## 14824 64 4 2 1
## 14825 65 4 2 1
## 14826 66 4 2 1
## 14827 67 4 2 1
## 14828 68 4 2 1
## 14829 69 4 2 1
## 14830 70 4 2 1
## 14831 71 4 2 1
## 14832 72 4 2 1
## 14833 73 4 2 1
## 14834 74 4 2 1
## 14835 75 4 2 1
## 14836 76 4 2 1
## 14837 77 4 2 1
## 14838 78 4 2 1
## 14839 79 4 2 1
## 14840 80 4 2 1
## 14841 81 4 2 1
## 14842 82 4 2 1
## 14843 83 4 2 1
## 14844 84 4 2 1
## 14845 85 4 2 1
## 14846 86 4 2 1
## 14847 87 4 2 1
## 14848 88 4 2 1
## 14849 89 4 2 1
## 14850 90 4 2 1
## 14851 91 4 2 1
## 14852 92 4 2 1
## 14853 93 4 2 1
## 14854 94 4 2 1
## 14855 95 4 2 1
## 14856 96 4 2 1
## 14857 97 4 2 1
## 14858 98 4 2 1
## 14859 99 4 2 1
## 14860 100 4 2 1
## 14861 101 4 2 1
## 14862 102 4 2 1
## 14863 103 4 2 1
## 14864 104 4 2 1
## 14865 105 4 2 1
## 14866 106 4 2 1
## 14867 107 4 2 1
## 14868 108 4 2 1
## 14869 109 4 2 1
## 14870 110 4 2 1
## 14871 111 4 2 1
## 14872 112 4 2 1
## 14873 113 4 2 1
## 14874 114 4 2 1
## 14875 115 4 2 1
## 14876 116 4 2 1
## 14877 117 4 2 1
## 14878 118 4 2 1
## 14879 119 4 2 1
## 14880 120 4 2 1
## 14881 1 5 2 1
## 14882 2 5 2 1
## 14883 3 5 2 1
## 14884 4 5 2 1
## 14885 5 5 2 1
## 14886 6 5 2 1
## 14887 7 5 2 1
## 14888 8 5 2 1
## 14889 9 5 2 1
## 14890 10 5 2 1
## 14891 11 5 2 1
## 14892 12 5 2 1
## 14893 13 5 2 1
## 14894 14 5 2 1
## 14895 15 5 2 1
## 14896 16 5 2 1
## 14897 17 5 2 1
## 14898 18 5 2 1
## 14899 19 5 2 1
## 14900 20 5 2 1
## 14901 21 5 2 1
## 14902 22 5 2 1
## 14903 23 5 2 1
## 14904 24 5 2 1
## 14905 25 5 2 1
## 14906 26 5 2 1
## 14907 27 5 2 1
## 14908 28 5 2 1
## 14909 29 5 2 1
## 14910 30 5 2 1
## 14911 31 5 2 1
## 14912 32 5 2 1
## 14913 33 5 2 1
## 14914 34 5 2 1
## 14915 35 5 2 1
## 14916 36 5 2 1
## 14917 37 5 2 1
## 14918 38 5 2 1
## 14919 39 5 2 1
## 14920 40 5 2 1
## 14921 41 5 2 1
## 14922 42 5 2 1
## 14923 43 5 2 1
## 14924 44 5 2 1
## 14925 45 5 2 1
## 14926 46 5 2 1
## 14927 47 5 2 1
## 14928 48 5 2 1
## 14929 49 5 2 1
## 14930 50 5 2 1
## 14931 51 5 2 1
## 14932 52 5 2 1
## 14933 53 5 2 1
## 14934 54 5 2 1
## 14935 55 5 2 1
## 14936 56 5 2 1
## 14937 57 5 2 1
## 14938 58 5 2 1
## 14939 59 5 2 1
## 14940 60 5 2 1
## 14941 61 5 2 1
## 14942 62 5 2 1
## 14943 63 5 2 1
## 14944 64 5 2 1
## 14945 65 5 2 1
## 14946 66 5 2 1
## 14947 67 5 2 1
## 14948 68 5 2 1
## 14949 69 5 2 1
## 14950 70 5 2 1
## 14951 71 5 2 1
## 14952 72 5 2 1
## 14953 73 5 2 1
## 14954 74 5 2 1
## 14955 75 5 2 1
## 14956 76 5 2 1
## 14957 77 5 2 1
## 14958 78 5 2 1
## 14959 79 5 2 1
## 14960 80 5 2 1
## 14961 81 5 2 1
## 14962 82 5 2 1
## 14963 83 5 2 1
## 14964 84 5 2 1
## 14965 85 5 2 1
## 14966 86 5 2 1
## 14967 87 5 2 1
## 14968 88 5 2 1
## 14969 89 5 2 1
## 14970 90 5 2 1
## 14971 91 5 2 1
## 14972 92 5 2 1
## 14973 93 5 2 1
## 14974 94 5 2 1
## 14975 95 5 2 1
## 14976 96 5 2 1
## 14977 97 5 2 1
## 14978 98 5 2 1
## 14979 99 5 2 1
## 14980 100 5 2 1
## 14981 101 5 2 1
## 14982 102 5 2 1
## 14983 103 5 2 1
## 14984 104 5 2 1
## 14985 105 5 2 1
## 14986 106 5 2 1
## 14987 107 5 2 1
## 14988 108 5 2 1
## 14989 109 5 2 1
## 14990 110 5 2 1
## 14991 111 5 2 1
## 14992 112 5 2 1
## 14993 113 5 2 1
## 14994 114 5 2 1
## 14995 115 5 2 1
## 14996 116 5 2 1
## 14997 117 5 2 1
## 14998 118 5 2 1
## 14999 119 5 2 1
## 15000 120 5 2 1
## 15001 1 6 2 1
## 15002 2 6 2 1
## 15003 3 6 2 1
## 15004 4 6 2 1
## 15005 5 6 2 1
## 15006 6 6 2 1
## 15007 7 6 2 1
## 15008 8 6 2 1
## 15009 9 6 2 1
## 15010 10 6 2 1
## 15011 11 6 2 1
## 15012 12 6 2 1
## 15013 13 6 2 1
## 15014 14 6 2 1
## 15015 15 6 2 1
## 15016 16 6 2 1
## 15017 17 6 2 1
## 15018 18 6 2 1
## 15019 19 6 2 1
## 15020 20 6 2 1
## 15021 21 6 2 1
## 15022 22 6 2 1
## 15023 23 6 2 1
## 15024 24 6 2 1
## 15025 25 6 2 1
## 15026 26 6 2 1
## 15027 27 6 2 1
## 15028 28 6 2 1
## 15029 29 6 2 1
## 15030 30 6 2 1
## 15031 31 6 2 1
## 15032 32 6 2 1
## 15033 33 6 2 1
## 15034 34 6 2 1
## 15035 35 6 2 1
## 15036 36 6 2 1
## 15037 37 6 2 1
## 15038 38 6 2 1
## 15039 39 6 2 1
## 15040 40 6 2 1
## 15041 41 6 2 1
## 15042 42 6 2 1
## 15043 43 6 2 1
## 15044 44 6 2 1
## 15045 45 6 2 1
## 15046 46 6 2 1
## 15047 47 6 2 1
## 15048 48 6 2 1
## 15049 49 6 2 1
## 15050 50 6 2 1
## 15051 51 6 2 1
## 15052 52 6 2 1
## 15053 53 6 2 1
## 15054 54 6 2 1
## 15055 55 6 2 1
## 15056 56 6 2 1
## 15057 57 6 2 1
## 15058 58 6 2 1
## 15059 59 6 2 1
## 15060 60 6 2 1
## 15061 61 6 2 1
## 15062 62 6 2 1
## 15063 63 6 2 1
## 15064 64 6 2 1
## 15065 65 6 2 1
## 15066 66 6 2 1
## 15067 67 6 2 1
## 15068 68 6 2 1
## 15069 69 6 2 1
## 15070 70 6 2 1
## 15071 71 6 2 1
## 15072 72 6 2 1
## 15073 73 6 2 1
## 15074 74 6 2 1
## 15075 75 6 2 1
## 15076 76 6 2 1
## 15077 77 6 2 1
## 15078 78 6 2 1
## 15079 79 6 2 1
## 15080 80 6 2 1
## 15081 81 6 2 1
## 15082 82 6 2 1
## 15083 83 6 2 1
## 15084 84 6 2 1
## 15085 85 6 2 1
## 15086 86 6 2 1
## 15087 87 6 2 1
## 15088 88 6 2 1
## 15089 89 6 2 1
## 15090 90 6 2 1
## 15091 91 6 2 1
## 15092 92 6 2 1
## 15093 93 6 2 1
## 15094 94 6 2 1
## 15095 95 6 2 1
## 15096 96 6 2 1
## 15097 97 6 2 1
## 15098 98 6 2 1
## 15099 99 6 2 1
## 15100 100 6 2 1
## 15101 101 6 2 1
## 15102 102 6 2 1
## 15103 103 6 2 1
## 15104 104 6 2 1
## 15105 105 6 2 1
## 15106 106 6 2 1
## 15107 107 6 2 1
## 15108 108 6 2 1
## 15109 109 6 2 1
## 15110 110 6 2 1
## 15111 111 6 2 1
## 15112 112 6 2 1
## 15113 113 6 2 1
## 15114 114 6 2 1
## 15115 115 6 2 1
## 15116 116 6 2 1
## 15117 117 6 2 1
## 15118 118 6 2 1
## 15119 119 6 2 1
## 15120 120 6 2 1
## 15121 1 7 2 1
## 15122 2 7 2 1
## 15123 3 7 2 1
## 15124 4 7 2 1
## 15125 5 7 2 1
## 15126 6 7 2 1
## 15127 7 7 2 1
## 15128 8 7 2 1
## 15129 9 7 2 1
## 15130 10 7 2 1
## 15131 11 7 2 1
## 15132 12 7 2 1
## 15133 13 7 2 1
## 15134 14 7 2 1
## 15135 15 7 2 1
## 15136 16 7 2 1
## 15137 17 7 2 1
## 15138 18 7 2 1
## 15139 19 7 2 1
## 15140 20 7 2 1
## 15141 21 7 2 1
## 15142 22 7 2 1
## 15143 23 7 2 1
## 15144 24 7 2 1
## 15145 25 7 2 1
## 15146 26 7 2 1
## 15147 27 7 2 1
## 15148 28 7 2 1
## 15149 29 7 2 1
## 15150 30 7 2 1
## 15151 31 7 2 1
## 15152 32 7 2 1
## 15153 33 7 2 1
## 15154 34 7 2 1
## 15155 35 7 2 1
## 15156 36 7 2 1
## 15157 37 7 2 1
## 15158 38 7 2 1
## 15159 39 7 2 1
## 15160 40 7 2 1
## 15161 41 7 2 1
## 15162 42 7 2 1
## 15163 43 7 2 1
## 15164 44 7 2 1
## 15165 45 7 2 1
## 15166 46 7 2 1
## 15167 47 7 2 1
## 15168 48 7 2 1
## 15169 49 7 2 1
## 15170 50 7 2 1
## 15171 51 7 2 1
## 15172 52 7 2 1
## 15173 53 7 2 1
## 15174 54 7 2 1
## 15175 55 7 2 1
## 15176 56 7 2 1
## 15177 57 7 2 1
## 15178 58 7 2 1
## 15179 59 7 2 1
## 15180 60 7 2 1
## 15181 61 7 2 1
## 15182 62 7 2 1
## 15183 63 7 2 1
## 15184 64 7 2 1
## 15185 65 7 2 1
## 15186 66 7 2 1
## 15187 67 7 2 1
## 15188 68 7 2 1
## 15189 69 7 2 1
## 15190 70 7 2 1
## 15191 71 7 2 1
## 15192 72 7 2 1
## 15193 73 7 2 1
## 15194 74 7 2 1
## 15195 75 7 2 1
## 15196 76 7 2 1
## 15197 77 7 2 1
## 15198 78 7 2 1
## 15199 79 7 2 1
## 15200 80 7 2 1
## 15201 81 7 2 1
## 15202 82 7 2 1
## 15203 83 7 2 1
## 15204 84 7 2 1
## 15205 85 7 2 1
## 15206 86 7 2 1
## 15207 87 7 2 1
## 15208 88 7 2 1
## 15209 89 7 2 1
## 15210 90 7 2 1
## 15211 91 7 2 1
## 15212 92 7 2 1
## 15213 93 7 2 1
## 15214 94 7 2 1
## 15215 95 7 2 1
## 15216 96 7 2 1
## 15217 97 7 2 1
## 15218 98 7 2 1
## 15219 99 7 2 1
## 15220 100 7 2 1
## 15221 101 7 2 1
## 15222 102 7 2 1
## 15223 103 7 2 1
## 15224 104 7 2 1
## 15225 105 7 2 1
## 15226 106 7 2 1
## 15227 107 7 2 1
## 15228 108 7 2 1
## 15229 109 7 2 1
## 15230 110 7 2 1
## 15231 111 7 2 1
## 15232 112 7 2 1
## 15233 113 7 2 1
## 15234 114 7 2 1
## 15235 115 7 2 1
## 15236 116 7 2 1
## 15237 117 7 2 1
## 15238 118 7 2 1
## 15239 119 7 2 1
## 15240 120 7 2 1
## 15241 1 8 2 1
## 15242 2 8 2 1
## 15243 3 8 2 1
## 15244 4 8 2 1
## 15245 5 8 2 1
## 15246 6 8 2 1
## 15247 7 8 2 1
## 15248 8 8 2 1
## 15249 9 8 2 1
## 15250 10 8 2 1
## 15251 11 8 2 1
## 15252 12 8 2 1
## 15253 13 8 2 1
## 15254 14 8 2 1
## 15255 15 8 2 1
## 15256 16 8 2 1
## 15257 17 8 2 1
## 15258 18 8 2 1
## 15259 19 8 2 1
## 15260 20 8 2 1
## 15261 21 8 2 1
## 15262 22 8 2 1
## 15263 23 8 2 1
## 15264 24 8 2 1
## 15265 25 8 2 1
## 15266 26 8 2 1
## 15267 27 8 2 1
## 15268 28 8 2 1
## 15269 29 8 2 1
## 15270 30 8 2 1
## 15271 31 8 2 1
## 15272 32 8 2 1
## 15273 33 8 2 1
## 15274 34 8 2 1
## 15275 35 8 2 1
## 15276 36 8 2 1
## 15277 37 8 2 1
## 15278 38 8 2 1
## 15279 39 8 2 1
## 15280 40 8 2 1
## 15281 41 8 2 1
## 15282 42 8 2 1
## 15283 43 8 2 1
## 15284 44 8 2 1
## 15285 45 8 2 1
## 15286 46 8 2 1
## 15287 47 8 2 1
## 15288 48 8 2 1
## 15289 49 8 2 1
## 15290 50 8 2 1
## 15291 51 8 2 1
## 15292 52 8 2 1
## 15293 53 8 2 1
## 15294 54 8 2 1
## 15295 55 8 2 1
## 15296 56 8 2 1
## 15297 57 8 2 1
## 15298 58 8 2 1
## 15299 59 8 2 1
## 15300 60 8 2 1
## 15301 61 8 2 1
## 15302 62 8 2 1
## 15303 63 8 2 1
## 15304 64 8 2 1
## 15305 65 8 2 1
## 15306 66 8 2 1
## 15307 67 8 2 1
## 15308 68 8 2 1
## 15309 69 8 2 1
## 15310 70 8 2 1
## 15311 71 8 2 1
## 15312 72 8 2 1
## 15313 73 8 2 1
## 15314 74 8 2 1
## 15315 75 8 2 1
## 15316 76 8 2 1
## 15317 77 8 2 1
## 15318 78 8 2 1
## 15319 79 8 2 1
## 15320 80 8 2 1
## 15321 81 8 2 1
## 15322 82 8 2 1
## 15323 83 8 2 1
## 15324 84 8 2 1
## 15325 85 8 2 1
## 15326 86 8 2 1
## 15327 87 8 2 1
## 15328 88 8 2 1
## 15329 89 8 2 1
## 15330 90 8 2 1
## 15331 91 8 2 1
## 15332 92 8 2 1
## 15333 93 8 2 1
## 15334 94 8 2 1
## 15335 95 8 2 1
## 15336 96 8 2 1
## 15337 97 8 2 1
## 15338 98 8 2 1
## 15339 99 8 2 1
## 15340 100 8 2 1
## 15341 101 8 2 1
## 15342 102 8 2 1
## 15343 103 8 2 1
## 15344 104 8 2 1
## 15345 105 8 2 1
## 15346 106 8 2 1
## 15347 107 8 2 1
## 15348 108 8 2 1
## 15349 109 8 2 1
## 15350 110 8 2 1
## 15351 111 8 2 1
## 15352 112 8 2 1
## 15353 113 8 2 1
## 15354 114 8 2 1
## 15355 115 8 2 1
## 15356 116 8 2 1
## 15357 117 8 2 1
## 15358 118 8 2 1
## 15359 119 8 2 1
## 15360 120 8 2 1
## 15361 1 9 2 1
## 15362 2 9 2 1
## 15363 3 9 2 1
## 15364 4 9 2 1
## 15365 5 9 2 1
## 15366 6 9 2 1
## 15367 7 9 2 1
## 15368 8 9 2 1
## 15369 9 9 2 1
## 15370 10 9 2 1
## 15371 11 9 2 1
## 15372 12 9 2 1
## 15373 13 9 2 1
## 15374 14 9 2 1
## 15375 15 9 2 1
## 15376 16 9 2 1
## 15377 17 9 2 1
## 15378 18 9 2 1
## 15379 19 9 2 1
## 15380 20 9 2 1
## 15381 21 9 2 1
## 15382 22 9 2 1
## 15383 23 9 2 1
## 15384 24 9 2 1
## 15385 25 9 2 1
## 15386 26 9 2 1
## 15387 27 9 2 1
## 15388 28 9 2 1
## 15389 29 9 2 1
## 15390 30 9 2 1
## 15391 31 9 2 1
## 15392 32 9 2 1
## 15393 33 9 2 1
## 15394 34 9 2 1
## 15395 35 9 2 1
## 15396 36 9 2 1
## 15397 37 9 2 1
## 15398 38 9 2 1
## 15399 39 9 2 1
## 15400 40 9 2 1
## 15401 41 9 2 1
## 15402 42 9 2 1
## 15403 43 9 2 1
## 15404 44 9 2 1
## 15405 45 9 2 1
## 15406 46 9 2 1
## 15407 47 9 2 1
## 15408 48 9 2 1
## 15409 49 9 2 1
## 15410 50 9 2 1
## 15411 51 9 2 1
## 15412 52 9 2 1
## 15413 53 9 2 1
## 15414 54 9 2 1
## 15415 55 9 2 1
## 15416 56 9 2 1
## 15417 57 9 2 1
## 15418 58 9 2 1
## 15419 59 9 2 1
## 15420 60 9 2 1
## 15421 61 9 2 1
## 15422 62 9 2 1
## 15423 63 9 2 1
## 15424 64 9 2 1
## 15425 65 9 2 1
## 15426 66 9 2 1
## 15427 67 9 2 1
## 15428 68 9 2 1
## 15429 69 9 2 1
## 15430 70 9 2 1
## 15431 71 9 2 1
## 15432 72 9 2 1
## 15433 73 9 2 1
## 15434 74 9 2 1
## 15435 75 9 2 1
## 15436 76 9 2 1
## 15437 77 9 2 1
## 15438 78 9 2 1
## 15439 79 9 2 1
## 15440 80 9 2 1
## 15441 81 9 2 1
## 15442 82 9 2 1
## 15443 83 9 2 1
## 15444 84 9 2 1
## 15445 85 9 2 1
## 15446 86 9 2 1
## 15447 87 9 2 1
## 15448 88 9 2 1
## 15449 89 9 2 1
## 15450 90 9 2 1
## 15451 91 9 2 1
## 15452 92 9 2 1
## 15453 93 9 2 1
## 15454 94 9 2 1
## 15455 95 9 2 1
## 15456 96 9 2 1
## 15457 97 9 2 1
## 15458 98 9 2 1
## 15459 99 9 2 1
## 15460 100 9 2 1
## 15461 101 9 2 1
## 15462 102 9 2 1
## 15463 103 9 2 1
## 15464 104 9 2 1
## 15465 105 9 2 1
## 15466 106 9 2 1
## 15467 107 9 2 1
## 15468 108 9 2 1
## 15469 109 9 2 1
## 15470 110 9 2 1
## 15471 111 9 2 1
## 15472 112 9 2 1
## 15473 113 9 2 1
## 15474 114 9 2 1
## 15475 115 9 2 1
## 15476 116 9 2 1
## 15477 117 9 2 1
## 15478 118 9 2 1
## 15479 119 9 2 1
## 15480 120 9 2 1
## 15481 1 10 2 1
## 15482 2 10 2 1
## 15483 3 10 2 1
## 15484 4 10 2 1
## 15485 5 10 2 1
## 15486 6 10 2 1
## 15487 7 10 2 1
## 15488 8 10 2 1
## 15489 9 10 2 1
## 15490 10 10 2 1
## 15491 11 10 2 1
## 15492 12 10 2 1
## 15493 13 10 2 1
## 15494 14 10 2 1
## 15495 15 10 2 1
## 15496 16 10 2 1
## 15497 17 10 2 1
## 15498 18 10 2 1
## 15499 19 10 2 1
## 15500 20 10 2 1
## 15501 21 10 2 1
## 15502 22 10 2 1
## 15503 23 10 2 1
## 15504 24 10 2 1
## 15505 25 10 2 1
## 15506 26 10 2 1
## 15507 27 10 2 1
## 15508 28 10 2 1
## 15509 29 10 2 1
## 15510 30 10 2 1
## 15511 31 10 2 1
## 15512 32 10 2 1
## 15513 33 10 2 1
## 15514 34 10 2 1
## 15515 35 10 2 1
## 15516 36 10 2 1
## 15517 37 10 2 1
## 15518 38 10 2 1
## 15519 39 10 2 1
## 15520 40 10 2 1
## 15521 41 10 2 1
## 15522 42 10 2 1
## 15523 43 10 2 1
## 15524 44 10 2 1
## 15525 45 10 2 1
## 15526 46 10 2 1
## 15527 47 10 2 1
## 15528 48 10 2 1
## 15529 49 10 2 1
## 15530 50 10 2 1
## 15531 51 10 2 1
## 15532 52 10 2 1
## 15533 53 10 2 1
## 15534 54 10 2 1
## 15535 55 10 2 1
## 15536 56 10 2 1
## 15537 57 10 2 1
## 15538 58 10 2 1
## 15539 59 10 2 1
## 15540 60 10 2 1
## 15541 61 10 2 1
## 15542 62 10 2 1
## 15543 63 10 2 1
## 15544 64 10 2 1
## 15545 65 10 2 1
## 15546 66 10 2 1
## 15547 67 10 2 1
## 15548 68 10 2 1
## 15549 69 10 2 1
## 15550 70 10 2 1
## 15551 71 10 2 1
## 15552 72 10 2 1
## 15553 73 10 2 1
## 15554 74 10 2 1
## 15555 75 10 2 1
## 15556 76 10 2 1
## 15557 77 10 2 1
## 15558 78 10 2 1
## 15559 79 10 2 1
## 15560 80 10 2 1
## 15561 81 10 2 1
## 15562 82 10 2 1
## 15563 83 10 2 1
## 15564 84 10 2 1
## 15565 85 10 2 1
## 15566 86 10 2 1
## 15567 87 10 2 1
## 15568 88 10 2 1
## 15569 89 10 2 1
## 15570 90 10 2 1
## 15571 91 10 2 1
## 15572 92 10 2 1
## 15573 93 10 2 1
## 15574 94 10 2 1
## 15575 95 10 2 1
## 15576 96 10 2 1
## 15577 97 10 2 1
## 15578 98 10 2 1
## 15579 99 10 2 1
## 15580 100 10 2 1
## 15581 101 10 2 1
## 15582 102 10 2 1
## 15583 103 10 2 1
## 15584 104 10 2 1
## 15585 105 10 2 1
## 15586 106 10 2 1
## 15587 107 10 2 1
## 15588 108 10 2 1
## 15589 109 10 2 1
## 15590 110 10 2 1
## 15591 111 10 2 1
## 15592 112 10 2 1
## 15593 113 10 2 1
## 15594 114 10 2 1
## 15595 115 10 2 1
## 15596 116 10 2 1
## 15597 117 10 2 1
## 15598 118 10 2 1
## 15599 119 10 2 1
## 15600 120 10 2 1
## 15601 1 11 2 1
## 15602 2 11 2 1
## 15603 3 11 2 1
## 15604 4 11 2 1
## 15605 5 11 2 1
## 15606 6 11 2 1
## 15607 7 11 2 1
## 15608 8 11 2 1
## 15609 9 11 2 1
## 15610 10 11 2 1
## 15611 11 11 2 1
## 15612 12 11 2 1
## 15613 13 11 2 1
## 15614 14 11 2 1
## 15615 15 11 2 1
## 15616 16 11 2 1
## 15617 17 11 2 1
## 15618 18 11 2 1
## 15619 19 11 2 1
## 15620 20 11 2 1
## 15621 21 11 2 1
## 15622 22 11 2 1
## 15623 23 11 2 1
## 15624 24 11 2 1
## 15625 25 11 2 1
## 15626 26 11 2 1
## 15627 27 11 2 1
## 15628 28 11 2 1
## 15629 29 11 2 1
## 15630 30 11 2 1
## 15631 31 11 2 1
## 15632 32 11 2 1
## 15633 33 11 2 1
## 15634 34 11 2 1
## 15635 35 11 2 1
## 15636 36 11 2 1
## 15637 37 11 2 1
## 15638 38 11 2 1
## 15639 39 11 2 1
## 15640 40 11 2 1
## 15641 41 11 2 1
## 15642 42 11 2 1
## 15643 43 11 2 1
## 15644 44 11 2 1
## 15645 45 11 2 1
## 15646 46 11 2 1
## 15647 47 11 2 1
## 15648 48 11 2 1
## 15649 49 11 2 1
## 15650 50 11 2 1
## 15651 51 11 2 1
## 15652 52 11 2 1
## 15653 53 11 2 1
## 15654 54 11 2 1
## 15655 55 11 2 1
## 15656 56 11 2 1
## 15657 57 11 2 1
## 15658 58 11 2 1
## 15659 59 11 2 1
## 15660 60 11 2 1
## 15661 61 11 2 1
## 15662 62 11 2 1
## 15663 63 11 2 1
## 15664 64 11 2 1
## 15665 65 11 2 1
## 15666 66 11 2 1
## 15667 67 11 2 1
## 15668 68 11 2 1
## 15669 69 11 2 1
## 15670 70 11 2 1
## 15671 71 11 2 1
## 15672 72 11 2 1
## 15673 73 11 2 1
## 15674 74 11 2 1
## 15675 75 11 2 1
## 15676 76 11 2 1
## 15677 77 11 2 1
## 15678 78 11 2 1
## 15679 79 11 2 1
## 15680 80 11 2 1
## 15681 81 11 2 1
## 15682 82 11 2 1
## 15683 83 11 2 1
## 15684 84 11 2 1
## 15685 85 11 2 1
## 15686 86 11 2 1
## 15687 87 11 2 1
## 15688 88 11 2 1
## 15689 89 11 2 1
## 15690 90 11 2 1
## 15691 91 11 2 1
## 15692 92 11 2 1
## 15693 93 11 2 1
## 15694 94 11 2 1
## 15695 95 11 2 1
## 15696 96 11 2 1
## 15697 97 11 2 1
## 15698 98 11 2 1
## 15699 99 11 2 1
## 15700 100 11 2 1
## 15701 101 11 2 1
## 15702 102 11 2 1
## 15703 103 11 2 1
## 15704 104 11 2 1
## 15705 105 11 2 1
## 15706 106 11 2 1
## 15707 107 11 2 1
## 15708 108 11 2 1
## 15709 109 11 2 1
## 15710 110 11 2 1
## 15711 111 11 2 1
## 15712 112 11 2 1
## 15713 113 11 2 1
## 15714 114 11 2 1
## 15715 115 11 2 1
## 15716 116 11 2 1
## 15717 117 11 2 1
## 15718 118 11 2 1
## 15719 119 11 2 1
## 15720 120 11 2 1
## 15721 1 12 2 1
## 15722 2 12 2 1
## 15723 3 12 2 1
## 15724 4 12 2 1
## 15725 5 12 2 1
## 15726 6 12 2 1
## 15727 7 12 2 1
## 15728 8 12 2 1
## 15729 9 12 2 1
## 15730 10 12 2 1
## 15731 11 12 2 1
## 15732 12 12 2 1
## 15733 13 12 2 1
## 15734 14 12 2 1
## 15735 15 12 2 1
## 15736 16 12 2 1
## 15737 17 12 2 1
## 15738 18 12 2 1
## 15739 19 12 2 1
## 15740 20 12 2 1
## 15741 21 12 2 1
## 15742 22 12 2 1
## 15743 23 12 2 1
## 15744 24 12 2 1
## 15745 25 12 2 1
## 15746 26 12 2 1
## 15747 27 12 2 1
## 15748 28 12 2 1
## 15749 29 12 2 1
## 15750 30 12 2 1
## 15751 31 12 2 1
## 15752 32 12 2 1
## 15753 33 12 2 1
## 15754 34 12 2 1
## 15755 35 12 2 1
## 15756 36 12 2 1
## 15757 37 12 2 1
## 15758 38 12 2 1
## 15759 39 12 2 1
## 15760 40 12 2 1
## 15761 41 12 2 1
## 15762 42 12 2 1
## 15763 43 12 2 1
## 15764 44 12 2 1
## 15765 45 12 2 1
## 15766 46 12 2 1
## 15767 47 12 2 1
## 15768 48 12 2 1
## 15769 49 12 2 1
## 15770 50 12 2 1
## 15771 51 12 2 1
## 15772 52 12 2 1
## 15773 53 12 2 1
## 15774 54 12 2 1
## 15775 55 12 2 1
## 15776 56 12 2 1
## 15777 57 12 2 1
## 15778 58 12 2 1
## 15779 59 12 2 1
## 15780 60 12 2 1
## 15781 61 12 2 1
## 15782 62 12 2 1
## 15783 63 12 2 1
## 15784 64 12 2 1
## 15785 65 12 2 1
## 15786 66 12 2 1
## 15787 67 12 2 1
## 15788 68 12 2 1
## 15789 69 12 2 1
## 15790 70 12 2 1
## 15791 71 12 2 1
## 15792 72 12 2 1
## 15793 73 12 2 1
## 15794 74 12 2 1
## 15795 75 12 2 1
## 15796 76 12 2 1
## 15797 77 12 2 1
## 15798 78 12 2 1
## 15799 79 12 2 1
## 15800 80 12 2 1
## 15801 81 12 2 1
## 15802 82 12 2 1
## 15803 83 12 2 1
## 15804 84 12 2 1
## 15805 85 12 2 1
## 15806 86 12 2 1
## 15807 87 12 2 1
## 15808 88 12 2 1
## 15809 89 12 2 1
## 15810 90 12 2 1
## 15811 91 12 2 1
## 15812 92 12 2 1
## 15813 93 12 2 1
## 15814 94 12 2 1
## 15815 95 12 2 1
## 15816 96 12 2 1
## 15817 97 12 2 1
## 15818 98 12 2 1
## 15819 99 12 2 1
## 15820 100 12 2 1
## 15821 101 12 2 1
## 15822 102 12 2 1
## 15823 103 12 2 1
## 15824 104 12 2 1
## 15825 105 12 2 1
## 15826 106 12 2 1
## 15827 107 12 2 1
## 15828 108 12 2 1
## 15829 109 12 2 1
## 15830 110 12 2 1
## 15831 111 12 2 1
## 15832 112 12 2 1
## 15833 113 12 2 1
## 15834 114 12 2 1
## 15835 115 12 2 1
## 15836 116 12 2 1
## 15837 117 12 2 1
## 15838 118 12 2 1
## 15839 119 12 2 1
## 15840 120 12 2 1
## 15841 1 13 2 1
## 15842 2 13 2 1
## 15843 3 13 2 1
## 15844 4 13 2 1
## 15845 5 13 2 1
## 15846 6 13 2 1
## 15847 7 13 2 1
## 15848 8 13 2 1
## 15849 9 13 2 1
## 15850 10 13 2 1
## 15851 11 13 2 1
## 15852 12 13 2 1
## 15853 13 13 2 1
## 15854 14 13 2 1
## 15855 15 13 2 1
## 15856 16 13 2 1
## 15857 17 13 2 1
## 15858 18 13 2 1
## 15859 19 13 2 1
## 15860 20 13 2 1
## 15861 21 13 2 1
## 15862 22 13 2 1
## 15863 23 13 2 1
## 15864 24 13 2 1
## 15865 25 13 2 1
## 15866 26 13 2 1
## 15867 27 13 2 1
## 15868 28 13 2 1
## 15869 29 13 2 1
## 15870 30 13 2 1
## 15871 31 13 2 1
## 15872 32 13 2 1
## 15873 33 13 2 1
## 15874 34 13 2 1
## 15875 35 13 2 1
## 15876 36 13 2 1
## 15877 37 13 2 1
## 15878 38 13 2 1
## 15879 39 13 2 1
## 15880 40 13 2 1
## 15881 41 13 2 1
## 15882 42 13 2 1
## 15883 43 13 2 1
## 15884 44 13 2 1
## 15885 45 13 2 1
## 15886 46 13 2 1
## 15887 47 13 2 1
## 15888 48 13 2 1
## 15889 49 13 2 1
## 15890 50 13 2 1
## 15891 51 13 2 1
## 15892 52 13 2 1
## 15893 53 13 2 1
## 15894 54 13 2 1
## 15895 55 13 2 1
## 15896 56 13 2 1
## 15897 57 13 2 1
## 15898 58 13 2 1
## 15899 59 13 2 1
## 15900 60 13 2 1
## 15901 61 13 2 1
## 15902 62 13 2 1
## 15903 63 13 2 1
## 15904 64 13 2 1
## 15905 65 13 2 1
## 15906 66 13 2 1
## 15907 67 13 2 1
## 15908 68 13 2 1
## 15909 69 13 2 1
## 15910 70 13 2 1
## 15911 71 13 2 1
## 15912 72 13 2 1
## 15913 73 13 2 1
## 15914 74 13 2 1
## 15915 75 13 2 1
## 15916 76 13 2 1
## 15917 77 13 2 1
## 15918 78 13 2 1
## 15919 79 13 2 1
## 15920 80 13 2 1
## 15921 81 13 2 1
## 15922 82 13 2 1
## 15923 83 13 2 1
## 15924 84 13 2 1
## 15925 85 13 2 1
## 15926 86 13 2 1
## 15927 87 13 2 1
## 15928 88 13 2 1
## 15929 89 13 2 1
## 15930 90 13 2 1
## 15931 91 13 2 1
## 15932 92 13 2 1
## 15933 93 13 2 1
## 15934 94 13 2 1
## 15935 95 13 2 1
## 15936 96 13 2 1
## 15937 97 13 2 1
## 15938 98 13 2 1
## 15939 99 13 2 1
## 15940 100 13 2 1
## 15941 101 13 2 1
## 15942 102 13 2 1
## 15943 103 13 2 1
## 15944 104 13 2 1
## 15945 105 13 2 1
## 15946 106 13 2 1
## 15947 107 13 2 1
## 15948 108 13 2 1
## 15949 109 13 2 1
## 15950 110 13 2 1
## 15951 111 13 2 1
## 15952 112 13 2 1
## 15953 113 13 2 1
## 15954 114 13 2 1
## 15955 115 13 2 1
## 15956 116 13 2 1
## 15957 117 13 2 1
## 15958 118 13 2 1
## 15959 119 13 2 1
## 15960 120 13 2 1
## 15961 1 14 2 1
## 15962 2 14 2 1
## 15963 3 14 2 1
## 15964 4 14 2 1
## 15965 5 14 2 1
## 15966 6 14 2 1
## 15967 7 14 2 1
## 15968 8 14 2 1
## 15969 9 14 2 1
## 15970 10 14 2 1
## 15971 11 14 2 1
## 15972 12 14 2 1
## 15973 13 14 2 1
## 15974 14 14 2 1
## 15975 15 14 2 1
## 15976 16 14 2 1
## 15977 17 14 2 1
## 15978 18 14 2 1
## 15979 19 14 2 1
## 15980 20 14 2 1
## 15981 21 14 2 1
## 15982 22 14 2 1
## 15983 23 14 2 1
## 15984 24 14 2 1
## 15985 25 14 2 1
## 15986 26 14 2 1
## 15987 27 14 2 1
## 15988 28 14 2 1
## 15989 29 14 2 1
## 15990 30 14 2 1
## 15991 31 14 2 1
## 15992 32 14 2 1
## 15993 33 14 2 1
## 15994 34 14 2 1
## 15995 35 14 2 1
## 15996 36 14 2 1
## 15997 37 14 2 1
## 15998 38 14 2 1
## 15999 39 14 2 1
## 16000 40 14 2 1
## 16001 41 14 2 1
## 16002 42 14 2 1
## 16003 43 14 2 1
## 16004 44 14 2 1
## 16005 45 14 2 1
## 16006 46 14 2 1
## 16007 47 14 2 1
## 16008 48 14 2 1
## 16009 49 14 2 1
## 16010 50 14 2 1
## 16011 51 14 2 1
## 16012 52 14 2 1
## 16013 53 14 2 1
## 16014 54 14 2 1
## 16015 55 14 2 1
## 16016 56 14 2 1
## 16017 57 14 2 1
## 16018 58 14 2 1
## 16019 59 14 2 1
## 16020 60 14 2 1
## 16021 61 14 2 1
## 16022 62 14 2 1
## 16023 63 14 2 1
## 16024 64 14 2 1
## 16025 65 14 2 1
## 16026 66 14 2 1
## 16027 67 14 2 1
## 16028 68 14 2 1
## 16029 69 14 2 1
## 16030 70 14 2 1
## 16031 71 14 2 1
## 16032 72 14 2 1
## 16033 73 14 2 1
## 16034 74 14 2 1
## 16035 75 14 2 1
## 16036 76 14 2 1
## 16037 77 14 2 1
## 16038 78 14 2 1
## 16039 79 14 2 1
## 16040 80 14 2 1
## 16041 81 14 2 1
## 16042 82 14 2 1
## 16043 83 14 2 1
## 16044 84 14 2 1
## 16045 85 14 2 1
## 16046 86 14 2 1
## 16047 87 14 2 1
## 16048 88 14 2 1
## 16049 89 14 2 1
## 16050 90 14 2 1
## 16051 91 14 2 1
## 16052 92 14 2 1
## 16053 93 14 2 1
## 16054 94 14 2 1
## 16055 95 14 2 1
## 16056 96 14 2 1
## 16057 97 14 2 1
## 16058 98 14 2 1
## 16059 99 14 2 1
## 16060 100 14 2 1
## 16061 101 14 2 1
## 16062 102 14 2 1
## 16063 103 14 2 1
## 16064 104 14 2 1
## 16065 105 14 2 1
## 16066 106 14 2 1
## 16067 107 14 2 1
## 16068 108 14 2 1
## 16069 109 14 2 1
## 16070 110 14 2 1
## 16071 111 14 2 1
## 16072 112 14 2 1
## 16073 113 14 2 1
## 16074 114 14 2 1
## 16075 115 14 2 1
## 16076 116 14 2 1
## 16077 117 14 2 1
## 16078 118 14 2 1
## 16079 119 14 2 1
## 16080 120 14 2 1
## 16081 1 15 2 1
## 16082 2 15 2 1
## 16083 3 15 2 1
## 16084 4 15 2 1
## 16085 5 15 2 1
## 16086 6 15 2 1
## 16087 7 15 2 1
## 16088 8 15 2 1
## 16089 9 15 2 1
## 16090 10 15 2 1
## 16091 11 15 2 1
## 16092 12 15 2 1
## 16093 13 15 2 1
## 16094 14 15 2 1
## 16095 15 15 2 1
## 16096 16 15 2 1
## 16097 17 15 2 1
## 16098 18 15 2 1
## 16099 19 15 2 1
## 16100 20 15 2 1
## 16101 21 15 2 1
## 16102 22 15 2 1
## 16103 23 15 2 1
## 16104 24 15 2 1
## 16105 25 15 2 1
## 16106 26 15 2 1
## 16107 27 15 2 1
## 16108 28 15 2 1
## 16109 29 15 2 1
## 16110 30 15 2 1
## 16111 31 15 2 1
## 16112 32 15 2 1
## 16113 33 15 2 1
## 16114 34 15 2 1
## 16115 35 15 2 1
## 16116 36 15 2 1
## 16117 37 15 2 1
## 16118 38 15 2 1
## 16119 39 15 2 1
## 16120 40 15 2 1
## 16121 41 15 2 1
## 16122 42 15 2 1
## 16123 43 15 2 1
## 16124 44 15 2 1
## 16125 45 15 2 1
## 16126 46 15 2 1
## 16127 47 15 2 1
## 16128 48 15 2 1
## 16129 49 15 2 1
## 16130 50 15 2 1
## 16131 51 15 2 1
## 16132 52 15 2 1
## 16133 53 15 2 1
## 16134 54 15 2 1
## 16135 55 15 2 1
## 16136 56 15 2 1
## 16137 57 15 2 1
## 16138 58 15 2 1
## 16139 59 15 2 1
## 16140 60 15 2 1
## 16141 61 15 2 1
## 16142 62 15 2 1
## 16143 63 15 2 1
## 16144 64 15 2 1
## 16145 65 15 2 1
## 16146 66 15 2 1
## 16147 67 15 2 1
## 16148 68 15 2 1
## 16149 69 15 2 1
## 16150 70 15 2 1
## 16151 71 15 2 1
## 16152 72 15 2 1
## 16153 73 15 2 1
## 16154 74 15 2 1
## 16155 75 15 2 1
## 16156 76 15 2 1
## 16157 77 15 2 1
## 16158 78 15 2 1
## 16159 79 15 2 1
## 16160 80 15 2 1
## 16161 81 15 2 1
## 16162 82 15 2 1
## 16163 83 15 2 1
## 16164 84 15 2 1
## 16165 85 15 2 1
## 16166 86 15 2 1
## 16167 87 15 2 1
## 16168 88 15 2 1
## 16169 89 15 2 1
## 16170 90 15 2 1
## 16171 91 15 2 1
## 16172 92 15 2 1
## 16173 93 15 2 1
## 16174 94 15 2 1
## 16175 95 15 2 1
## 16176 96 15 2 1
## 16177 97 15 2 1
## 16178 98 15 2 1
## 16179 99 15 2 1
## 16180 100 15 2 1
## 16181 101 15 2 1
## 16182 102 15 2 1
## 16183 103 15 2 1
## 16184 104 15 2 1
## 16185 105 15 2 1
## 16186 106 15 2 1
## 16187 107 15 2 1
## 16188 108 15 2 1
## 16189 109 15 2 1
## 16190 110 15 2 1
## 16191 111 15 2 1
## 16192 112 15 2 1
## 16193 113 15 2 1
## 16194 114 15 2 1
## 16195 115 15 2 1
## 16196 116 15 2 1
## 16197 117 15 2 1
## 16198 118 15 2 1
## 16199 119 15 2 1
## 16200 120 15 2 1
## 16201 1 16 2 1
## 16202 2 16 2 1
## 16203 3 16 2 1
## 16204 4 16 2 1
## 16205 5 16 2 1
## 16206 6 16 2 1
## 16207 7 16 2 1
## 16208 8 16 2 1
## 16209 9 16 2 1
## 16210 10 16 2 1
## 16211 11 16 2 1
## 16212 12 16 2 1
## 16213 13 16 2 1
## 16214 14 16 2 1
## 16215 15 16 2 1
## 16216 16 16 2 1
## 16217 17 16 2 1
## 16218 18 16 2 1
## 16219 19 16 2 1
## 16220 20 16 2 1
## 16221 21 16 2 1
## 16222 22 16 2 1
## 16223 23 16 2 1
## 16224 24 16 2 1
## 16225 25 16 2 1
## 16226 26 16 2 1
## 16227 27 16 2 1
## 16228 28 16 2 1
## 16229 29 16 2 1
## 16230 30 16 2 1
## 16231 31 16 2 1
## 16232 32 16 2 1
## 16233 33 16 2 1
## 16234 34 16 2 1
## 16235 35 16 2 1
## 16236 36 16 2 1
## 16237 37 16 2 1
## 16238 38 16 2 1
## 16239 39 16 2 1
## 16240 40 16 2 1
## 16241 41 16 2 1
## 16242 42 16 2 1
## 16243 43 16 2 1
## 16244 44 16 2 1
## 16245 45 16 2 1
## 16246 46 16 2 1
## 16247 47 16 2 1
## 16248 48 16 2 1
## 16249 49 16 2 1
## 16250 50 16 2 1
## 16251 51 16 2 1
## 16252 52 16 2 1
## 16253 53 16 2 1
## 16254 54 16 2 1
## 16255 55 16 2 1
## 16256 56 16 2 1
## 16257 57 16 2 1
## 16258 58 16 2 1
## 16259 59 16 2 1
## 16260 60 16 2 1
## 16261 61 16 2 1
## 16262 62 16 2 1
## 16263 63 16 2 1
## 16264 64 16 2 1
## 16265 65 16 2 1
## 16266 66 16 2 1
## 16267 67 16 2 1
## 16268 68 16 2 1
## 16269 69 16 2 1
## 16270 70 16 2 1
## 16271 71 16 2 1
## 16272 72 16 2 1
## 16273 73 16 2 1
## 16274 74 16 2 1
## 16275 75 16 2 1
## 16276 76 16 2 1
## 16277 77 16 2 1
## 16278 78 16 2 1
## 16279 79 16 2 1
## 16280 80 16 2 1
## 16281 81 16 2 1
## 16282 82 16 2 1
## 16283 83 16 2 1
## 16284 84 16 2 1
## 16285 85 16 2 1
## 16286 86 16 2 1
## 16287 87 16 2 1
## 16288 88 16 2 1
## 16289 89 16 2 1
## 16290 90 16 2 1
## 16291 91 16 2 1
## 16292 92 16 2 1
## 16293 93 16 2 1
## 16294 94 16 2 1
## 16295 95 16 2 1
## 16296 96 16 2 1
## 16297 97 16 2 1
## 16298 98 16 2 1
## 16299 99 16 2 1
## 16300 100 16 2 1
## 16301 101 16 2 1
## 16302 102 16 2 1
## 16303 103 16 2 1
## 16304 104 16 2 1
## 16305 105 16 2 1
## 16306 106 16 2 1
## 16307 107 16 2 1
## 16308 108 16 2 1
## 16309 109 16 2 1
## 16310 110 16 2 1
## 16311 111 16 2 1
## 16312 112 16 2 1
## 16313 113 16 2 1
## 16314 114 16 2 1
## 16315 115 16 2 1
## 16316 116 16 2 1
## 16317 117 16 2 1
## 16318 118 16 2 1
## 16319 119 16 2 1
## 16320 120 16 2 1
## 16321 1 17 2 1
## 16322 2 17 2 1
## 16323 3 17 2 1
## 16324 4 17 2 1
## 16325 5 17 2 1
## 16326 6 17 2 1
## 16327 7 17 2 1
## 16328 8 17 2 1
## 16329 9 17 2 1
## 16330 10 17 2 1
## 16331 11 17 2 1
## 16332 12 17 2 1
## 16333 13 17 2 1
## 16334 14 17 2 1
## 16335 15 17 2 1
## 16336 16 17 2 1
## 16337 17 17 2 1
## 16338 18 17 2 1
## 16339 19 17 2 1
## 16340 20 17 2 1
## 16341 21 17 2 1
## 16342 22 17 2 1
## 16343 23 17 2 1
## 16344 24 17 2 1
## 16345 25 17 2 1
## 16346 26 17 2 1
## 16347 27 17 2 1
## 16348 28 17 2 1
## 16349 29 17 2 1
## 16350 30 17 2 1
## 16351 31 17 2 1
## 16352 32 17 2 1
## 16353 33 17 2 1
## 16354 34 17 2 1
## 16355 35 17 2 1
## 16356 36 17 2 1
## 16357 37 17 2 1
## 16358 38 17 2 1
## 16359 39 17 2 1
## 16360 40 17 2 1
## 16361 41 17 2 1
## 16362 42 17 2 1
## 16363 43 17 2 1
## 16364 44 17 2 1
## 16365 45 17 2 1
## 16366 46 17 2 1
## 16367 47 17 2 1
## 16368 48 17 2 1
## 16369 49 17 2 1
## 16370 50 17 2 1
## 16371 51 17 2 1
## 16372 52 17 2 1
## 16373 53 17 2 1
## 16374 54 17 2 1
## 16375 55 17 2 1
## 16376 56 17 2 1
## 16377 57 17 2 1
## 16378 58 17 2 1
## 16379 59 17 2 1
## 16380 60 17 2 1
## 16381 61 17 2 1
## 16382 62 17 2 1
## 16383 63 17 2 1
## 16384 64 17 2 1
## 16385 65 17 2 1
## 16386 66 17 2 1
## 16387 67 17 2 1
## 16388 68 17 2 1
## 16389 69 17 2 1
## 16390 70 17 2 1
## 16391 71 17 2 1
## 16392 72 17 2 1
## 16393 73 17 2 1
## 16394 74 17 2 1
## 16395 75 17 2 1
## 16396 76 17 2 1
## 16397 77 17 2 1
## 16398 78 17 2 1
## 16399 79 17 2 1
## 16400 80 17 2 1
## 16401 81 17 2 1
## 16402 82 17 2 1
## 16403 83 17 2 1
## 16404 84 17 2 1
## 16405 85 17 2 1
## 16406 86 17 2 1
## 16407 87 17 2 1
## 16408 88 17 2 1
## 16409 89 17 2 1
## 16410 90 17 2 1
## 16411 91 17 2 1
## 16412 92 17 2 1
## 16413 93 17 2 1
## 16414 94 17 2 1
## 16415 95 17 2 1
## 16416 96 17 2 1
## 16417 97 17 2 1
## 16418 98 17 2 1
## 16419 99 17 2 1
## 16420 100 17 2 1
## 16421 101 17 2 1
## 16422 102 17 2 1
## 16423 103 17 2 1
## 16424 104 17 2 1
## 16425 105 17 2 1
## 16426 106 17 2 1
## 16427 107 17 2 1
## 16428 108 17 2 1
## 16429 109 17 2 1
## 16430 110 17 2 1
## 16431 111 17 2 1
## 16432 112 17 2 1
## 16433 113 17 2 1
## 16434 114 17 2 1
## 16435 115 17 2 1
## 16436 116 17 2 1
## 16437 117 17 2 1
## 16438 118 17 2 1
## 16439 119 17 2 1
## 16440 120 17 2 1
## 16441 1 18 2 1
## 16442 2 18 2 1
## 16443 3 18 2 1
## 16444 4 18 2 1
## 16445 5 18 2 1
## 16446 6 18 2 1
## 16447 7 18 2 1
## 16448 8 18 2 1
## 16449 9 18 2 1
## 16450 10 18 2 1
## 16451 11 18 2 1
## 16452 12 18 2 1
## 16453 13 18 2 1
## 16454 14 18 2 1
## 16455 15 18 2 1
## 16456 16 18 2 1
## 16457 17 18 2 1
## 16458 18 18 2 1
## 16459 19 18 2 1
## 16460 20 18 2 1
## 16461 21 18 2 1
## 16462 22 18 2 1
## 16463 23 18 2 1
## 16464 24 18 2 1
## 16465 25 18 2 1
## 16466 26 18 2 1
## 16467 27 18 2 1
## 16468 28 18 2 1
## 16469 29 18 2 1
## 16470 30 18 2 1
## 16471 31 18 2 1
## 16472 32 18 2 1
## 16473 33 18 2 1
## 16474 34 18 2 1
## 16475 35 18 2 1
## 16476 36 18 2 1
## 16477 37 18 2 1
## 16478 38 18 2 1
## 16479 39 18 2 1
## 16480 40 18 2 1
## 16481 41 18 2 1
## 16482 42 18 2 1
## 16483 43 18 2 1
## 16484 44 18 2 1
## 16485 45 18 2 1
## 16486 46 18 2 1
## 16487 47 18 2 1
## 16488 48 18 2 1
## 16489 49 18 2 1
## 16490 50 18 2 1
## 16491 51 18 2 1
## 16492 52 18 2 1
## 16493 53 18 2 1
## 16494 54 18 2 1
## 16495 55 18 2 1
## 16496 56 18 2 1
## 16497 57 18 2 1
## 16498 58 18 2 1
## 16499 59 18 2 1
## 16500 60 18 2 1
## 16501 61 18 2 1
## 16502 62 18 2 1
## 16503 63 18 2 1
## 16504 64 18 2 1
## 16505 65 18 2 1
## 16506 66 18 2 1
## 16507 67 18 2 1
## 16508 68 18 2 1
## 16509 69 18 2 1
## 16510 70 18 2 1
## 16511 71 18 2 1
## 16512 72 18 2 1
## 16513 73 18 2 1
## 16514 74 18 2 1
## 16515 75 18 2 1
## 16516 76 18 2 1
## 16517 77 18 2 1
## 16518 78 18 2 1
## 16519 79 18 2 1
## 16520 80 18 2 1
## 16521 81 18 2 1
## 16522 82 18 2 1
## 16523 83 18 2 1
## 16524 84 18 2 1
## 16525 85 18 2 1
## 16526 86 18 2 1
## 16527 87 18 2 1
## 16528 88 18 2 1
## 16529 89 18 2 1
## 16530 90 18 2 1
## 16531 91 18 2 1
## 16532 92 18 2 1
## 16533 93 18 2 1
## 16534 94 18 2 1
## 16535 95 18 2 1
## 16536 96 18 2 1
## 16537 97 18 2 1
## 16538 98 18 2 1
## 16539 99 18 2 1
## 16540 100 18 2 1
## 16541 101 18 2 1
## 16542 102 18 2 1
## 16543 103 18 2 1
## 16544 104 18 2 1
## 16545 105 18 2 1
## 16546 106 18 2 1
## 16547 107 18 2 1
## 16548 108 18 2 1
## 16549 109 18 2 1
## 16550 110 18 2 1
## 16551 111 18 2 1
## 16552 112 18 2 1
## 16553 113 18 2 1
## 16554 114 18 2 1
## 16555 115 18 2 1
## 16556 116 18 2 1
## 16557 117 18 2 1
## 16558 118 18 2 1
## 16559 119 18 2 1
## 16560 120 18 2 1
## 16561 1 19 2 1
## 16562 2 19 2 1
## 16563 3 19 2 1
## 16564 4 19 2 1
## 16565 5 19 2 1
## 16566 6 19 2 1
## 16567 7 19 2 1
## 16568 8 19 2 1
## 16569 9 19 2 1
## 16570 10 19 2 1
## 16571 11 19 2 1
## 16572 12 19 2 1
## 16573 13 19 2 1
## 16574 14 19 2 1
## 16575 15 19 2 1
## 16576 16 19 2 1
## 16577 17 19 2 1
## 16578 18 19 2 1
## 16579 19 19 2 1
## 16580 20 19 2 1
## 16581 21 19 2 1
## 16582 22 19 2 1
## 16583 23 19 2 1
## 16584 24 19 2 1
## 16585 25 19 2 1
## 16586 26 19 2 1
## 16587 27 19 2 1
## 16588 28 19 2 1
## 16589 29 19 2 1
## 16590 30 19 2 1
## 16591 31 19 2 1
## 16592 32 19 2 1
## 16593 33 19 2 1
## 16594 34 19 2 1
## 16595 35 19 2 1
## 16596 36 19 2 1
## 16597 37 19 2 1
## 16598 38 19 2 1
## 16599 39 19 2 1
## 16600 40 19 2 1
## 16601 41 19 2 1
## 16602 42 19 2 1
## 16603 43 19 2 1
## 16604 44 19 2 1
## 16605 45 19 2 1
## 16606 46 19 2 1
## 16607 47 19 2 1
## 16608 48 19 2 1
## 16609 49 19 2 1
## 16610 50 19 2 1
## 16611 51 19 2 1
## 16612 52 19 2 1
## 16613 53 19 2 1
## 16614 54 19 2 1
## 16615 55 19 2 1
## 16616 56 19 2 1
## 16617 57 19 2 1
## 16618 58 19 2 1
## 16619 59 19 2 1
## 16620 60 19 2 1
## 16621 61 19 2 1
## 16622 62 19 2 1
## 16623 63 19 2 1
## 16624 64 19 2 1
## 16625 65 19 2 1
## 16626 66 19 2 1
## 16627 67 19 2 1
## 16628 68 19 2 1
## 16629 69 19 2 1
## 16630 70 19 2 1
## 16631 71 19 2 1
## 16632 72 19 2 1
## 16633 73 19 2 1
## 16634 74 19 2 1
## 16635 75 19 2 1
## 16636 76 19 2 1
## 16637 77 19 2 1
## 16638 78 19 2 1
## 16639 79 19 2 1
## 16640 80 19 2 1
## 16641 81 19 2 1
## 16642 82 19 2 1
## 16643 83 19 2 1
## 16644 84 19 2 1
## 16645 85 19 2 1
## 16646 86 19 2 1
## 16647 87 19 2 1
## 16648 88 19 2 1
## 16649 89 19 2 1
## 16650 90 19 2 1
## 16651 91 19 2 1
## 16652 92 19 2 1
## 16653 93 19 2 1
## 16654 94 19 2 1
## 16655 95 19 2 1
## 16656 96 19 2 1
## 16657 97 19 2 1
## 16658 98 19 2 1
## 16659 99 19 2 1
## 16660 100 19 2 1
## 16661 101 19 2 1
## 16662 102 19 2 1
## 16663 103 19 2 1
## 16664 104 19 2 1
## 16665 105 19 2 1
## 16666 106 19 2 1
## 16667 107 19 2 1
## 16668 108 19 2 1
## 16669 109 19 2 1
## 16670 110 19 2 1
## 16671 111 19 2 1
## 16672 112 19 2 1
## 16673 113 19 2 1
## 16674 114 19 2 1
## 16675 115 19 2 1
## 16676 116 19 2 1
## 16677 117 19 2 1
## 16678 118 19 2 1
## 16679 119 19 2 1
## 16680 120 19 2 1
## 16681 1 20 2 1
## 16682 2 20 2 1
## 16683 3 20 2 1
## 16684 4 20 2 1
## 16685 5 20 2 1
## 16686 6 20 2 1
## 16687 7 20 2 1
## 16688 8 20 2 1
## 16689 9 20 2 1
## 16690 10 20 2 1
## 16691 11 20 2 1
## 16692 12 20 2 1
## 16693 13 20 2 1
## 16694 14 20 2 1
## 16695 15 20 2 1
## 16696 16 20 2 1
## 16697 17 20 2 1
## 16698 18 20 2 1
## 16699 19 20 2 1
## 16700 20 20 2 1
## 16701 21 20 2 1
## 16702 22 20 2 1
## 16703 23 20 2 1
## 16704 24 20 2 1
## 16705 25 20 2 1
## 16706 26 20 2 1
## 16707 27 20 2 1
## 16708 28 20 2 1
## 16709 29 20 2 1
## 16710 30 20 2 1
## 16711 31 20 2 1
## 16712 32 20 2 1
## 16713 33 20 2 1
## 16714 34 20 2 1
## 16715 35 20 2 1
## 16716 36 20 2 1
## 16717 37 20 2 1
## 16718 38 20 2 1
## 16719 39 20 2 1
## 16720 40 20 2 1
## 16721 41 20 2 1
## 16722 42 20 2 1
## 16723 43 20 2 1
## 16724 44 20 2 1
## 16725 45 20 2 1
## 16726 46 20 2 1
## 16727 47 20 2 1
## 16728 48 20 2 1
## 16729 49 20 2 1
## 16730 50 20 2 1
## 16731 51 20 2 1
## 16732 52 20 2 1
## 16733 53 20 2 1
## 16734 54 20 2 1
## 16735 55 20 2 1
## 16736 56 20 2 1
## 16737 57 20 2 1
## 16738 58 20 2 1
## 16739 59 20 2 1
## 16740 60 20 2 1
## 16741 61 20 2 1
## 16742 62 20 2 1
## 16743 63 20 2 1
## 16744 64 20 2 1
## 16745 65 20 2 1
## 16746 66 20 2 1
## 16747 67 20 2 1
## 16748 68 20 2 1
## 16749 69 20 2 1
## 16750 70 20 2 1
## 16751 71 20 2 1
## 16752 72 20 2 1
## 16753 73 20 2 1
## 16754 74 20 2 1
## 16755 75 20 2 1
## 16756 76 20 2 1
## 16757 77 20 2 1
## 16758 78 20 2 1
## 16759 79 20 2 1
## 16760 80 20 2 1
## 16761 81 20 2 1
## 16762 82 20 2 1
## 16763 83 20 2 1
## 16764 84 20 2 1
## 16765 85 20 2 1
## 16766 86 20 2 1
## 16767 87 20 2 1
## 16768 88 20 2 1
## 16769 89 20 2 1
## 16770 90 20 2 1
## 16771 91 20 2 1
## 16772 92 20 2 1
## 16773 93 20 2 1
## 16774 94 20 2 1
## 16775 95 20 2 1
## 16776 96 20 2 1
## 16777 97 20 2 1
## 16778 98 20 2 1
## 16779 99 20 2 1
## 16780 100 20 2 1
## 16781 101 20 2 1
## 16782 102 20 2 1
## 16783 103 20 2 1
## 16784 104 20 2 1
## 16785 105 20 2 1
## 16786 106 20 2 1
## 16787 107 20 2 1
## 16788 108 20 2 1
## 16789 109 20 2 1
## 16790 110 20 2 1
## 16791 111 20 2 1
## 16792 112 20 2 1
## 16793 113 20 2 1
## 16794 114 20 2 1
## 16795 115 20 2 1
## 16796 116 20 2 1
## 16797 117 20 2 1
## 16798 118 20 2 1
## 16799 119 20 2 1
## 16800 120 20 2 1
## 16801 1 21 2 1
## 16802 2 21 2 1
## 16803 3 21 2 1
## 16804 4 21 2 1
## 16805 5 21 2 1
## 16806 6 21 2 1
## 16807 7 21 2 1
## 16808 8 21 2 1
## 16809 9 21 2 1
## 16810 10 21 2 1
## 16811 11 21 2 1
## 16812 12 21 2 1
## 16813 13 21 2 1
## 16814 14 21 2 1
## 16815 15 21 2 1
## 16816 16 21 2 1
## 16817 17 21 2 1
## 16818 18 21 2 1
## 16819 19 21 2 1
## 16820 20 21 2 1
## 16821 21 21 2 1
## 16822 22 21 2 1
## 16823 23 21 2 1
## 16824 24 21 2 1
## 16825 25 21 2 1
## 16826 26 21 2 1
## 16827 27 21 2 1
## 16828 28 21 2 1
## 16829 29 21 2 1
## 16830 30 21 2 1
## 16831 31 21 2 1
## 16832 32 21 2 1
## 16833 33 21 2 1
## 16834 34 21 2 1
## 16835 35 21 2 1
## 16836 36 21 2 1
## 16837 37 21 2 1
## 16838 38 21 2 1
## 16839 39 21 2 1
## 16840 40 21 2 1
## 16841 41 21 2 1
## 16842 42 21 2 1
## 16843 43 21 2 1
## 16844 44 21 2 1
## 16845 45 21 2 1
## 16846 46 21 2 1
## 16847 47 21 2 1
## 16848 48 21 2 1
## 16849 49 21 2 1
## 16850 50 21 2 1
## 16851 51 21 2 1
## 16852 52 21 2 1
## 16853 53 21 2 1
## 16854 54 21 2 1
## 16855 55 21 2 1
## 16856 56 21 2 1
## 16857 57 21 2 1
## 16858 58 21 2 1
## 16859 59 21 2 1
## 16860 60 21 2 1
## 16861 61 21 2 1
## 16862 62 21 2 1
## 16863 63 21 2 1
## 16864 64 21 2 1
## 16865 65 21 2 1
## 16866 66 21 2 1
## 16867 67 21 2 1
## 16868 68 21 2 1
## 16869 69 21 2 1
## 16870 70 21 2 1
## 16871 71 21 2 1
## 16872 72 21 2 1
## 16873 73 21 2 1
## 16874 74 21 2 1
## 16875 75 21 2 1
## 16876 76 21 2 1
## 16877 77 21 2 1
## 16878 78 21 2 1
## 16879 79 21 2 1
## 16880 80 21 2 1
## 16881 81 21 2 1
## 16882 82 21 2 1
## 16883 83 21 2 1
## 16884 84 21 2 1
## 16885 85 21 2 1
## 16886 86 21 2 1
## 16887 87 21 2 1
## 16888 88 21 2 1
## 16889 89 21 2 1
## 16890 90 21 2 1
## 16891 91 21 2 1
## 16892 92 21 2 1
## 16893 93 21 2 1
## 16894 94 21 2 1
## 16895 95 21 2 1
## 16896 96 21 2 1
## 16897 97 21 2 1
## 16898 98 21 2 1
## 16899 99 21 2 1
## 16900 100 21 2 1
## 16901 101 21 2 1
## 16902 102 21 2 1
## 16903 103 21 2 1
## 16904 104 21 2 1
## 16905 105 21 2 1
## 16906 106 21 2 1
## 16907 107 21 2 1
## 16908 108 21 2 1
## 16909 109 21 2 1
## 16910 110 21 2 1
## 16911 111 21 2 1
## 16912 112 21 2 1
## 16913 113 21 2 1
## 16914 114 21 2 1
## 16915 115 21 2 1
## 16916 116 21 2 1
## 16917 117 21 2 1
## 16918 118 21 2 1
## 16919 119 21 2 1
## 16920 120 21 2 1
## 16921 1 22 2 1
## 16922 2 22 2 1
## 16923 3 22 2 1
## 16924 4 22 2 1
## 16925 5 22 2 1
## 16926 6 22 2 1
## 16927 7 22 2 1
## 16928 8 22 2 1
## 16929 9 22 2 1
## 16930 10 22 2 1
## 16931 11 22 2 1
## 16932 12 22 2 1
## 16933 13 22 2 1
## 16934 14 22 2 1
## 16935 15 22 2 1
## 16936 16 22 2 1
## 16937 17 22 2 1
## 16938 18 22 2 1
## 16939 19 22 2 1
## 16940 20 22 2 1
## 16941 21 22 2 1
## 16942 22 22 2 1
## 16943 23 22 2 1
## 16944 24 22 2 1
## 16945 25 22 2 1
## 16946 26 22 2 1
## 16947 27 22 2 1
## 16948 28 22 2 1
## 16949 29 22 2 1
## 16950 30 22 2 1
## 16951 31 22 2 1
## 16952 32 22 2 1
## 16953 33 22 2 1
## 16954 34 22 2 1
## 16955 35 22 2 1
## 16956 36 22 2 1
## 16957 37 22 2 1
## 16958 38 22 2 1
## 16959 39 22 2 1
## 16960 40 22 2 1
## 16961 41 22 2 1
## 16962 42 22 2 1
## 16963 43 22 2 1
## 16964 44 22 2 1
## 16965 45 22 2 1
## 16966 46 22 2 1
## 16967 47 22 2 1
## 16968 48 22 2 1
## 16969 49 22 2 1
## 16970 50 22 2 1
## 16971 51 22 2 1
## 16972 52 22 2 1
## 16973 53 22 2 1
## 16974 54 22 2 1
## 16975 55 22 2 1
## 16976 56 22 2 1
## 16977 57 22 2 1
## 16978 58 22 2 1
## 16979 59 22 2 1
## 16980 60 22 2 1
## 16981 61 22 2 1
## 16982 62 22 2 1
## 16983 63 22 2 1
## 16984 64 22 2 1
## 16985 65 22 2 1
## 16986 66 22 2 1
## 16987 67 22 2 1
## 16988 68 22 2 1
## 16989 69 22 2 1
## 16990 70 22 2 1
## 16991 71 22 2 1
## 16992 72 22 2 1
## 16993 73 22 2 1
## 16994 74 22 2 1
## 16995 75 22 2 1
## 16996 76 22 2 1
## 16997 77 22 2 1
## 16998 78 22 2 1
## 16999 79 22 2 1
## 17000 80 22 2 1
## 17001 81 22 2 1
## 17002 82 22 2 1
## 17003 83 22 2 1
## 17004 84 22 2 1
## 17005 85 22 2 1
## 17006 86 22 2 1
## 17007 87 22 2 1
## 17008 88 22 2 1
## 17009 89 22 2 1
## 17010 90 22 2 1
## 17011 91 22 2 1
## 17012 92 22 2 1
## 17013 93 22 2 1
## 17014 94 22 2 1
## 17015 95 22 2 1
## 17016 96 22 2 1
## 17017 97 22 2 1
## 17018 98 22 2 1
## 17019 99 22 2 1
## 17020 100 22 2 1
## 17021 101 22 2 1
## 17022 102 22 2 1
## 17023 103 22 2 1
## 17024 104 22 2 1
## 17025 105 22 2 1
## 17026 106 22 2 1
## 17027 107 22 2 1
## 17028 108 22 2 1
## 17029 109 22 2 1
## 17030 110 22 2 1
## 17031 111 22 2 1
## 17032 112 22 2 1
## 17033 113 22 2 1
## 17034 114 22 2 1
## 17035 115 22 2 1
## 17036 116 22 2 1
## 17037 117 22 2 1
## 17038 118 22 2 1
## 17039 119 22 2 1
## 17040 120 22 2 1
## 17041 1 23 2 1
## 17042 2 23 2 1
## 17043 3 23 2 1
## 17044 4 23 2 1
## 17045 5 23 2 1
## 17046 6 23 2 1
## 17047 7 23 2 1
## 17048 8 23 2 1
## 17049 9 23 2 1
## 17050 10 23 2 1
## 17051 11 23 2 1
## 17052 12 23 2 1
## 17053 13 23 2 1
## 17054 14 23 2 1
## 17055 15 23 2 1
## 17056 16 23 2 1
## 17057 17 23 2 1
## 17058 18 23 2 1
## 17059 19 23 2 1
## 17060 20 23 2 1
## 17061 21 23 2 1
## 17062 22 23 2 1
## 17063 23 23 2 1
## 17064 24 23 2 1
## 17065 25 23 2 1
## 17066 26 23 2 1
## 17067 27 23 2 1
## 17068 28 23 2 1
## 17069 29 23 2 1
## 17070 30 23 2 1
## 17071 31 23 2 1
## 17072 32 23 2 1
## 17073 33 23 2 1
## 17074 34 23 2 1
## 17075 35 23 2 1
## 17076 36 23 2 1
## 17077 37 23 2 1
## 17078 38 23 2 1
## 17079 39 23 2 1
## 17080 40 23 2 1
## 17081 41 23 2 1
## 17082 42 23 2 1
## 17083 43 23 2 1
## 17084 44 23 2 1
## 17085 45 23 2 1
## 17086 46 23 2 1
## 17087 47 23 2 1
## 17088 48 23 2 1
## 17089 49 23 2 1
## 17090 50 23 2 1
## 17091 51 23 2 1
## 17092 52 23 2 1
## 17093 53 23 2 1
## 17094 54 23 2 1
## 17095 55 23 2 1
## 17096 56 23 2 1
## 17097 57 23 2 1
## 17098 58 23 2 1
## 17099 59 23 2 1
## 17100 60 23 2 1
## 17101 61 23 2 1
## 17102 62 23 2 1
## 17103 63 23 2 1
## 17104 64 23 2 1
## 17105 65 23 2 1
## 17106 66 23 2 1
## 17107 67 23 2 1
## 17108 68 23 2 1
## 17109 69 23 2 1
## 17110 70 23 2 1
## 17111 71 23 2 1
## 17112 72 23 2 1
## 17113 73 23 2 1
## 17114 74 23 2 1
## 17115 75 23 2 1
## 17116 76 23 2 1
## 17117 77 23 2 1
## 17118 78 23 2 1
## 17119 79 23 2 1
## 17120 80 23 2 1
## 17121 81 23 2 1
## 17122 82 23 2 1
## 17123 83 23 2 1
## 17124 84 23 2 1
## 17125 85 23 2 1
## 17126 86 23 2 1
## 17127 87 23 2 1
## 17128 88 23 2 1
## 17129 89 23 2 1
## 17130 90 23 2 1
## 17131 91 23 2 1
## 17132 92 23 2 1
## 17133 93 23 2 1
## 17134 94 23 2 1
## 17135 95 23 2 1
## 17136 96 23 2 1
## 17137 97 23 2 1
## 17138 98 23 2 1
## 17139 99 23 2 1
## 17140 100 23 2 1
## 17141 101 23 2 1
## 17142 102 23 2 1
## 17143 103 23 2 1
## 17144 104 23 2 1
## 17145 105 23 2 1
## 17146 106 23 2 1
## 17147 107 23 2 1
## 17148 108 23 2 1
## 17149 109 23 2 1
## 17150 110 23 2 1
## 17151 111 23 2 1
## 17152 112 23 2 1
## 17153 113 23 2 1
## 17154 114 23 2 1
## 17155 115 23 2 1
## 17156 116 23 2 1
## 17157 117 23 2 1
## 17158 118 23 2 1
## 17159 119 23 2 1
## 17160 120 23 2 1
## 17161 1 24 2 1
## 17162 2 24 2 1
## 17163 3 24 2 1
## 17164 4 24 2 1
## 17165 5 24 2 1
## 17166 6 24 2 1
## 17167 7 24 2 1
## 17168 8 24 2 1
## 17169 9 24 2 1
## 17170 10 24 2 1
## 17171 11 24 2 1
## 17172 12 24 2 1
## 17173 13 24 2 1
## 17174 14 24 2 1
## 17175 15 24 2 1
## 17176 16 24 2 1
## 17177 17 24 2 1
## 17178 18 24 2 1
## 17179 19 24 2 1
## 17180 20 24 2 1
## 17181 21 24 2 1
## 17182 22 24 2 1
## 17183 23 24 2 1
## 17184 24 24 2 1
## 17185 25 24 2 1
## 17186 26 24 2 1
## 17187 27 24 2 1
## 17188 28 24 2 1
## 17189 29 24 2 1
## 17190 30 24 2 1
## 17191 31 24 2 1
## 17192 32 24 2 1
## 17193 33 24 2 1
## 17194 34 24 2 1
## 17195 35 24 2 1
## 17196 36 24 2 1
## 17197 37 24 2 1
## 17198 38 24 2 1
## 17199 39 24 2 1
## 17200 40 24 2 1
## 17201 41 24 2 1
## 17202 42 24 2 1
## 17203 43 24 2 1
## 17204 44 24 2 1
## 17205 45 24 2 1
## 17206 46 24 2 1
## 17207 47 24 2 1
## 17208 48 24 2 1
## 17209 49 24 2 1
## 17210 50 24 2 1
## 17211 51 24 2 1
## 17212 52 24 2 1
## 17213 53 24 2 1
## 17214 54 24 2 1
## 17215 55 24 2 1
## 17216 56 24 2 1
## 17217 57 24 2 1
## 17218 58 24 2 1
## 17219 59 24 2 1
## 17220 60 24 2 1
## 17221 61 24 2 1
## 17222 62 24 2 1
## 17223 63 24 2 1
## 17224 64 24 2 1
## 17225 65 24 2 1
## 17226 66 24 2 1
## 17227 67 24 2 1
## 17228 68 24 2 1
## 17229 69 24 2 1
## 17230 70 24 2 1
## 17231 71 24 2 1
## 17232 72 24 2 1
## 17233 73 24 2 1
## 17234 74 24 2 1
## 17235 75 24 2 1
## 17236 76 24 2 1
## 17237 77 24 2 1
## 17238 78 24 2 1
## 17239 79 24 2 1
## 17240 80 24 2 1
## 17241 81 24 2 1
## 17242 82 24 2 1
## 17243 83 24 2 1
## 17244 84 24 2 1
## 17245 85 24 2 1
## 17246 86 24 2 1
## 17247 87 24 2 1
## 17248 88 24 2 1
## 17249 89 24 2 1
## 17250 90 24 2 1
## 17251 91 24 2 1
## 17252 92 24 2 1
## 17253 93 24 2 1
## 17254 94 24 2 1
## 17255 95 24 2 1
## 17256 96 24 2 1
## 17257 97 24 2 1
## 17258 98 24 2 1
## 17259 99 24 2 1
## 17260 100 24 2 1
## 17261 101 24 2 1
## 17262 102 24 2 1
## 17263 103 24 2 1
## 17264 104 24 2 1
## 17265 105 24 2 1
## 17266 106 24 2 1
## 17267 107 24 2 1
## 17268 108 24 2 1
## 17269 109 24 2 1
## 17270 110 24 2 1
## 17271 111 24 2 1
## 17272 112 24 2 1
## 17273 113 24 2 1
## 17274 114 24 2 1
## 17275 115 24 2 1
## 17276 116 24 2 1
## 17277 117 24 2 1
## 17278 118 24 2 1
## 17279 119 24 2 1
## 17280 120 24 2 1
## 17281 1 25 2 1
## 17282 2 25 2 1
## 17283 3 25 2 1
## 17284 4 25 2 1
## 17285 5 25 2 1
## 17286 6 25 2 1
## 17287 7 25 2 1
## 17288 8 25 2 1
## 17289 9 25 2 1
## 17290 10 25 2 1
## 17291 11 25 2 1
## 17292 12 25 2 1
## 17293 13 25 2 1
## 17294 14 25 2 1
## 17295 15 25 2 1
## 17296 16 25 2 1
## 17297 17 25 2 1
## 17298 18 25 2 1
## 17299 19 25 2 1
## 17300 20 25 2 1
## 17301 21 25 2 1
## 17302 22 25 2 1
## 17303 23 25 2 1
## 17304 24 25 2 1
## 17305 25 25 2 1
## 17306 26 25 2 1
## 17307 27 25 2 1
## 17308 28 25 2 1
## 17309 29 25 2 1
## 17310 30 25 2 1
## 17311 31 25 2 1
## 17312 32 25 2 1
## 17313 33 25 2 1
## 17314 34 25 2 1
## 17315 35 25 2 1
## 17316 36 25 2 1
## 17317 37 25 2 1
## 17318 38 25 2 1
## 17319 39 25 2 1
## 17320 40 25 2 1
## 17321 41 25 2 1
## 17322 42 25 2 1
## 17323 43 25 2 1
## 17324 44 25 2 1
## 17325 45 25 2 1
## 17326 46 25 2 1
## 17327 47 25 2 1
## 17328 48 25 2 1
## 17329 49 25 2 1
## 17330 50 25 2 1
## 17331 51 25 2 1
## 17332 52 25 2 1
## 17333 53 25 2 1
## 17334 54 25 2 1
## 17335 55 25 2 1
## 17336 56 25 2 1
## 17337 57 25 2 1
## 17338 58 25 2 1
## 17339 59 25 2 1
## 17340 60 25 2 1
## 17341 61 25 2 1
## 17342 62 25 2 1
## 17343 63 25 2 1
## 17344 64 25 2 1
## 17345 65 25 2 1
## 17346 66 25 2 1
## 17347 67 25 2 1
## 17348 68 25 2 1
## 17349 69 25 2 1
## 17350 70 25 2 1
## 17351 71 25 2 1
## 17352 72 25 2 1
## 17353 73 25 2 1
## 17354 74 25 2 1
## 17355 75 25 2 1
## 17356 76 25 2 1
## 17357 77 25 2 1
## 17358 78 25 2 1
## 17359 79 25 2 1
## 17360 80 25 2 1
## 17361 81 25 2 1
## 17362 82 25 2 1
## 17363 83 25 2 1
## 17364 84 25 2 1
## 17365 85 25 2 1
## 17366 86 25 2 1
## 17367 87 25 2 1
## 17368 88 25 2 1
## 17369 89 25 2 1
## 17370 90 25 2 1
## 17371 91 25 2 1
## 17372 92 25 2 1
## 17373 93 25 2 1
## 17374 94 25 2 1
## 17375 95 25 2 1
## 17376 96 25 2 1
## 17377 97 25 2 1
## 17378 98 25 2 1
## 17379 99 25 2 1
## 17380 100 25 2 1
## 17381 101 25 2 1
## 17382 102 25 2 1
## 17383 103 25 2 1
## 17384 104 25 2 1
## 17385 105 25 2 1
## 17386 106 25 2 1
## 17387 107 25 2 1
## 17388 108 25 2 1
## 17389 109 25 2 1
## 17390 110 25 2 1
## 17391 111 25 2 1
## 17392 112 25 2 1
## 17393 113 25 2 1
## 17394 114 25 2 1
## 17395 115 25 2 1
## 17396 116 25 2 1
## 17397 117 25 2 1
## 17398 118 25 2 1
## 17399 119 25 2 1
## 17400 120 25 2 1
## 17401 1 26 2 1
## 17402 2 26 2 1
## 17403 3 26 2 1
## 17404 4 26 2 1
## 17405 5 26 2 1
## 17406 6 26 2 1
## 17407 7 26 2 1
## 17408 8 26 2 1
## 17409 9 26 2 1
## 17410 10 26 2 1
## 17411 11 26 2 1
## 17412 12 26 2 1
## 17413 13 26 2 1
## 17414 14 26 2 1
## 17415 15 26 2 1
## 17416 16 26 2 1
## 17417 17 26 2 1
## 17418 18 26 2 1
## 17419 19 26 2 1
## 17420 20 26 2 1
## 17421 21 26 2 1
## 17422 22 26 2 1
## 17423 23 26 2 1
## 17424 24 26 2 1
## 17425 25 26 2 1
## 17426 26 26 2 1
## 17427 27 26 2 1
## 17428 28 26 2 1
## 17429 29 26 2 1
## 17430 30 26 2 1
## 17431 31 26 2 1
## 17432 32 26 2 1
## 17433 33 26 2 1
## 17434 34 26 2 1
## 17435 35 26 2 1
## 17436 36 26 2 1
## 17437 37 26 2 1
## 17438 38 26 2 1
## 17439 39 26 2 1
## 17440 40 26 2 1
## 17441 41 26 2 1
## 17442 42 26 2 1
## 17443 43 26 2 1
## 17444 44 26 2 1
## 17445 45 26 2 1
## 17446 46 26 2 1
## 17447 47 26 2 1
## 17448 48 26 2 1
## 17449 49 26 2 1
## 17450 50 26 2 1
## 17451 51 26 2 1
## 17452 52 26 2 1
## 17453 53 26 2 1
## 17454 54 26 2 1
## 17455 55 26 2 1
## 17456 56 26 2 1
## 17457 57 26 2 1
## 17458 58 26 2 1
## 17459 59 26 2 1
## 17460 60 26 2 1
## 17461 61 26 2 1
## 17462 62 26 2 1
## 17463 63 26 2 1
## 17464 64 26 2 1
## 17465 65 26 2 1
## 17466 66 26 2 1
## 17467 67 26 2 1
## 17468 68 26 2 1
## 17469 69 26 2 1
## 17470 70 26 2 1
## 17471 71 26 2 1
## 17472 72 26 2 1
## 17473 73 26 2 1
## 17474 74 26 2 1
## 17475 75 26 2 1
## 17476 76 26 2 1
## 17477 77 26 2 1
## 17478 78 26 2 1
## 17479 79 26 2 1
## 17480 80 26 2 1
## 17481 81 26 2 1
## 17482 82 26 2 1
## 17483 83 26 2 1
## 17484 84 26 2 1
## 17485 85 26 2 1
## 17486 86 26 2 1
## 17487 87 26 2 1
## 17488 88 26 2 1
## 17489 89 26 2 1
## 17490 90 26 2 1
## 17491 91 26 2 1
## 17492 92 26 2 1
## 17493 93 26 2 1
## 17494 94 26 2 1
## 17495 95 26 2 1
## 17496 96 26 2 1
## 17497 97 26 2 1
## 17498 98 26 2 1
## 17499 99 26 2 1
## 17500 100 26 2 1
## 17501 101 26 2 1
## 17502 102 26 2 1
## 17503 103 26 2 1
## 17504 104 26 2 1
## 17505 105 26 2 1
## 17506 106 26 2 1
## 17507 107 26 2 1
## 17508 108 26 2 1
## 17509 109 26 2 1
## 17510 110 26 2 1
## 17511 111 26 2 1
## 17512 112 26 2 1
## 17513 113 26 2 1
## 17514 114 26 2 1
## 17515 115 26 2 1
## 17516 116 26 2 1
## 17517 117 26 2 1
## 17518 118 26 2 1
## 17519 119 26 2 1
## 17520 120 26 2 1
## 17521 1 27 2 1
## 17522 2 27 2 1
## 17523 3 27 2 1
## 17524 4 27 2 1
## 17525 5 27 2 1
## 17526 6 27 2 1
## 17527 7 27 2 1
## 17528 8 27 2 1
## 17529 9 27 2 1
## 17530 10 27 2 1
## 17531 11 27 2 1
## 17532 12 27 2 1
## 17533 13 27 2 1
## 17534 14 27 2 1
## 17535 15 27 2 1
## 17536 16 27 2 1
## 17537 17 27 2 1
## 17538 18 27 2 1
## 17539 19 27 2 1
## 17540 20 27 2 1
## 17541 21 27 2 1
## 17542 22 27 2 1
## 17543 23 27 2 1
## 17544 24 27 2 1
## 17545 25 27 2 1
## 17546 26 27 2 1
## 17547 27 27 2 1
## 17548 28 27 2 1
## 17549 29 27 2 1
## 17550 30 27 2 1
## 17551 31 27 2 1
## 17552 32 27 2 1
## 17553 33 27 2 1
## 17554 34 27 2 1
## 17555 35 27 2 1
## 17556 36 27 2 1
## 17557 37 27 2 1
## 17558 38 27 2 1
## 17559 39 27 2 1
## 17560 40 27 2 1
## 17561 41 27 2 1
## 17562 42 27 2 1
## 17563 43 27 2 1
## 17564 44 27 2 1
## 17565 45 27 2 1
## 17566 46 27 2 1
## 17567 47 27 2 1
## 17568 48 27 2 1
## 17569 49 27 2 1
## 17570 50 27 2 1
## 17571 51 27 2 1
## 17572 52 27 2 1
## 17573 53 27 2 1
## 17574 54 27 2 1
## 17575 55 27 2 1
## 17576 56 27 2 1
## 17577 57 27 2 1
## 17578 58 27 2 1
## 17579 59 27 2 1
## 17580 60 27 2 1
## 17581 61 27 2 1
## 17582 62 27 2 1
## 17583 63 27 2 1
## 17584 64 27 2 1
## 17585 65 27 2 1
## 17586 66 27 2 1
## 17587 67 27 2 1
## 17588 68 27 2 1
## 17589 69 27 2 1
## 17590 70 27 2 1
## 17591 71 27 2 1
## 17592 72 27 2 1
## 17593 73 27 2 1
## 17594 74 27 2 1
## 17595 75 27 2 1
## 17596 76 27 2 1
## 17597 77 27 2 1
## 17598 78 27 2 1
## 17599 79 27 2 1
## 17600 80 27 2 1
## 17601 81 27 2 1
## 17602 82 27 2 1
## 17603 83 27 2 1
## 17604 84 27 2 1
## 17605 85 27 2 1
## 17606 86 27 2 1
## 17607 87 27 2 1
## 17608 88 27 2 1
## 17609 89 27 2 1
## 17610 90 27 2 1
## 17611 91 27 2 1
## 17612 92 27 2 1
## 17613 93 27 2 1
## 17614 94 27 2 1
## 17615 95 27 2 1
## 17616 96 27 2 1
## 17617 97 27 2 1
## 17618 98 27 2 1
## 17619 99 27 2 1
## 17620 100 27 2 1
## 17621 101 27 2 1
## 17622 102 27 2 1
## 17623 103 27 2 1
## 17624 104 27 2 1
## 17625 105 27 2 1
## 17626 106 27 2 1
## 17627 107 27 2 1
## 17628 108 27 2 1
## 17629 109 27 2 1
## 17630 110 27 2 1
## 17631 111 27 2 1
## 17632 112 27 2 1
## 17633 113 27 2 1
## 17634 114 27 2 1
## 17635 115 27 2 1
## 17636 116 27 2 1
## 17637 117 27 2 1
## 17638 118 27 2 1
## 17639 119 27 2 1
## 17640 120 27 2 1
## 17641 1 28 2 1
## 17642 2 28 2 1
## 17643 3 28 2 1
## 17644 4 28 2 1
## 17645 5 28 2 1
## 17646 6 28 2 1
## 17647 7 28 2 1
## 17648 8 28 2 1
## 17649 9 28 2 1
## 17650 10 28 2 1
## 17651 11 28 2 1
## 17652 12 28 2 1
## 17653 13 28 2 1
## 17654 14 28 2 1
## 17655 15 28 2 1
## 17656 16 28 2 1
## 17657 17 28 2 1
## 17658 18 28 2 1
## 17659 19 28 2 1
## 17660 20 28 2 1
## 17661 21 28 2 1
## 17662 22 28 2 1
## 17663 23 28 2 1
## 17664 24 28 2 1
## 17665 25 28 2 1
## 17666 26 28 2 1
## 17667 27 28 2 1
## 17668 28 28 2 1
## 17669 29 28 2 1
## 17670 30 28 2 1
## 17671 31 28 2 1
## 17672 32 28 2 1
## 17673 33 28 2 1
## 17674 34 28 2 1
## 17675 35 28 2 1
## 17676 36 28 2 1
## 17677 37 28 2 1
## 17678 38 28 2 1
## 17679 39 28 2 1
## 17680 40 28 2 1
## 17681 41 28 2 1
## 17682 42 28 2 1
## 17683 43 28 2 1
## 17684 44 28 2 1
## 17685 45 28 2 1
## 17686 46 28 2 1
## 17687 47 28 2 1
## 17688 48 28 2 1
## 17689 49 28 2 1
## 17690 50 28 2 1
## 17691 51 28 2 1
## 17692 52 28 2 1
## 17693 53 28 2 1
## 17694 54 28 2 1
## 17695 55 28 2 1
## 17696 56 28 2 1
## 17697 57 28 2 1
## 17698 58 28 2 1
## 17699 59 28 2 1
## 17700 60 28 2 1
## 17701 61 28 2 1
## 17702 62 28 2 1
## 17703 63 28 2 1
## 17704 64 28 2 1
## 17705 65 28 2 1
## 17706 66 28 2 1
## 17707 67 28 2 1
## 17708 68 28 2 1
## 17709 69 28 2 1
## 17710 70 28 2 1
## 17711 71 28 2 1
## 17712 72 28 2 1
## 17713 73 28 2 1
## 17714 74 28 2 1
## 17715 75 28 2 1
## 17716 76 28 2 1
## 17717 77 28 2 1
## 17718 78 28 2 1
## 17719 79 28 2 1
## 17720 80 28 2 1
## 17721 81 28 2 1
## 17722 82 28 2 1
## 17723 83 28 2 1
## 17724 84 28 2 1
## 17725 85 28 2 1
## 17726 86 28 2 1
## 17727 87 28 2 1
## 17728 88 28 2 1
## 17729 89 28 2 1
## 17730 90 28 2 1
## 17731 91 28 2 1
## 17732 92 28 2 1
## 17733 93 28 2 1
## 17734 94 28 2 1
## 17735 95 28 2 1
## 17736 96 28 2 1
## 17737 97 28 2 1
## 17738 98 28 2 1
## 17739 99 28 2 1
## 17740 100 28 2 1
## 17741 101 28 2 1
## 17742 102 28 2 1
## 17743 103 28 2 1
## 17744 104 28 2 1
## 17745 105 28 2 1
## 17746 106 28 2 1
## 17747 107 28 2 1
## 17748 108 28 2 1
## 17749 109 28 2 1
## 17750 110 28 2 1
## 17751 111 28 2 1
## 17752 112 28 2 1
## 17753 113 28 2 1
## 17754 114 28 2 1
## 17755 115 28 2 1
## 17756 116 28 2 1
## 17757 117 28 2 1
## 17758 118 28 2 1
## 17759 119 28 2 1
## 17760 120 28 2 1
## 17761 1 29 2 1
## 17762 2 29 2 1
## 17763 3 29 2 1
## 17764 4 29 2 1
## 17765 5 29 2 1
## 17766 6 29 2 1
## 17767 7 29 2 1
## 17768 8 29 2 1
## 17769 9 29 2 1
## 17770 10 29 2 1
## 17771 11 29 2 1
## 17772 12 29 2 1
## 17773 13 29 2 1
## 17774 14 29 2 1
## 17775 15 29 2 1
## 17776 16 29 2 1
## 17777 17 29 2 1
## 17778 18 29 2 1
## 17779 19 29 2 1
## 17780 20 29 2 1
## 17781 21 29 2 1
## 17782 22 29 2 1
## 17783 23 29 2 1
## 17784 24 29 2 1
## 17785 25 29 2 1
## 17786 26 29 2 1
## 17787 27 29 2 1
## 17788 28 29 2 1
## 17789 29 29 2 1
## 17790 30 29 2 1
## 17791 31 29 2 1
## 17792 32 29 2 1
## 17793 33 29 2 1
## 17794 34 29 2 1
## 17795 35 29 2 1
## 17796 36 29 2 1
## 17797 37 29 2 1
## 17798 38 29 2 1
## 17799 39 29 2 1
## 17800 40 29 2 1
## 17801 41 29 2 1
## 17802 42 29 2 1
## 17803 43 29 2 1
## 17804 44 29 2 1
## 17805 45 29 2 1
## 17806 46 29 2 1
## 17807 47 29 2 1
## 17808 48 29 2 1
## 17809 49 29 2 1
## 17810 50 29 2 1
## 17811 51 29 2 1
## 17812 52 29 2 1
## 17813 53 29 2 1
## 17814 54 29 2 1
## 17815 55 29 2 1
## 17816 56 29 2 1
## 17817 57 29 2 1
## 17818 58 29 2 1
## 17819 59 29 2 1
## 17820 60 29 2 1
## 17821 61 29 2 1
## 17822 62 29 2 1
## 17823 63 29 2 1
## 17824 64 29 2 1
## 17825 65 29 2 1
## 17826 66 29 2 1
## 17827 67 29 2 1
## 17828 68 29 2 1
## 17829 69 29 2 1
## 17830 70 29 2 1
## 17831 71 29 2 1
## 17832 72 29 2 1
## 17833 73 29 2 1
## 17834 74 29 2 1
## 17835 75 29 2 1
## 17836 76 29 2 1
## 17837 77 29 2 1
## 17838 78 29 2 1
## 17839 79 29 2 1
## 17840 80 29 2 1
## 17841 81 29 2 1
## 17842 82 29 2 1
## 17843 83 29 2 1
## 17844 84 29 2 1
## 17845 85 29 2 1
## 17846 86 29 2 1
## 17847 87 29 2 1
## 17848 88 29 2 1
## 17849 89 29 2 1
## 17850 90 29 2 1
## 17851 91 29 2 1
## 17852 92 29 2 1
## 17853 93 29 2 1
## 17854 94 29 2 1
## 17855 95 29 2 1
## 17856 96 29 2 1
## 17857 97 29 2 1
## 17858 98 29 2 1
## 17859 99 29 2 1
## 17860 100 29 2 1
## 17861 101 29 2 1
## 17862 102 29 2 1
## 17863 103 29 2 1
## 17864 104 29 2 1
## 17865 105 29 2 1
## 17866 106 29 2 1
## 17867 107 29 2 1
## 17868 108 29 2 1
## 17869 109 29 2 1
## 17870 110 29 2 1
## 17871 111 29 2 1
## 17872 112 29 2 1
## 17873 113 29 2 1
## 17874 114 29 2 1
## 17875 115 29 2 1
## 17876 116 29 2 1
## 17877 117 29 2 1
## 17878 118 29 2 1
## 17879 119 29 2 1
## 17880 120 29 2 1
## 17881 1 30 2 1
## 17882 2 30 2 1
## 17883 3 30 2 1
## 17884 4 30 2 1
## 17885 5 30 2 1
## 17886 6 30 2 1
## 17887 7 30 2 1
## 17888 8 30 2 1
## 17889 9 30 2 1
## 17890 10 30 2 1
## 17891 11 30 2 1
## 17892 12 30 2 1
## 17893 13 30 2 1
## 17894 14 30 2 1
## 17895 15 30 2 1
## 17896 16 30 2 1
## 17897 17 30 2 1
## 17898 18 30 2 1
## 17899 19 30 2 1
## 17900 20 30 2 1
## 17901 21 30 2 1
## 17902 22 30 2 1
## 17903 23 30 2 1
## 17904 24 30 2 1
## 17905 25 30 2 1
## 17906 26 30 2 1
## 17907 27 30 2 1
## 17908 28 30 2 1
## 17909 29 30 2 1
## 17910 30 30 2 1
## 17911 31 30 2 1
## 17912 32 30 2 1
## 17913 33 30 2 1
## 17914 34 30 2 1
## 17915 35 30 2 1
## 17916 36 30 2 1
## 17917 37 30 2 1
## 17918 38 30 2 1
## 17919 39 30 2 1
## 17920 40 30 2 1
## 17921 41 30 2 1
## 17922 42 30 2 1
## 17923 43 30 2 1
## 17924 44 30 2 1
## 17925 45 30 2 1
## 17926 46 30 2 1
## 17927 47 30 2 1
## 17928 48 30 2 1
## 17929 49 30 2 1
## 17930 50 30 2 1
## 17931 51 30 2 1
## 17932 52 30 2 1
## 17933 53 30 2 1
## 17934 54 30 2 1
## 17935 55 30 2 1
## 17936 56 30 2 1
## 17937 57 30 2 1
## 17938 58 30 2 1
## 17939 59 30 2 1
## 17940 60 30 2 1
## 17941 61 30 2 1
## 17942 62 30 2 1
## 17943 63 30 2 1
## 17944 64 30 2 1
## 17945 65 30 2 1
## 17946 66 30 2 1
## 17947 67 30 2 1
## 17948 68 30 2 1
## 17949 69 30 2 1
## 17950 70 30 2 1
## 17951 71 30 2 1
## 17952 72 30 2 1
## 17953 73 30 2 1
## 17954 74 30 2 1
## 17955 75 30 2 1
## 17956 76 30 2 1
## 17957 77 30 2 1
## 17958 78 30 2 1
## 17959 79 30 2 1
## 17960 80 30 2 1
## 17961 81 30 2 1
## 17962 82 30 2 1
## 17963 83 30 2 1
## 17964 84 30 2 1
## 17965 85 30 2 1
## 17966 86 30 2 1
## 17967 87 30 2 1
## 17968 88 30 2 1
## 17969 89 30 2 1
## 17970 90 30 2 1
## 17971 91 30 2 1
## 17972 92 30 2 1
## 17973 93 30 2 1
## 17974 94 30 2 1
## 17975 95 30 2 1
## 17976 96 30 2 1
## 17977 97 30 2 1
## 17978 98 30 2 1
## 17979 99 30 2 1
## 17980 100 30 2 1
## 17981 101 30 2 1
## 17982 102 30 2 1
## 17983 103 30 2 1
## 17984 104 30 2 1
## 17985 105 30 2 1
## 17986 106 30 2 1
## 17987 107 30 2 1
## 17988 108 30 2 1
## 17989 109 30 2 1
## 17990 110 30 2 1
## 17991 111 30 2 1
## 17992 112 30 2 1
## 17993 113 30 2 1
## 17994 114 30 2 1
## 17995 115 30 2 1
## 17996 116 30 2 1
## 17997 117 30 2 1
## 17998 118 30 2 1
## 17999 119 30 2 1
## 18000 120 30 2 1
## 18001 1 31 2 1
## 18002 2 31 2 1
## 18003 3 31 2 1
## 18004 4 31 2 1
## 18005 5 31 2 1
## 18006 6 31 2 1
## 18007 7 31 2 1
## 18008 8 31 2 1
## 18009 9 31 2 1
## 18010 10 31 2 1
## 18011 11 31 2 1
## 18012 12 31 2 1
## 18013 13 31 2 1
## 18014 14 31 2 1
## 18015 15 31 2 1
## 18016 16 31 2 1
## 18017 17 31 2 1
## 18018 18 31 2 1
## 18019 19 31 2 1
## 18020 20 31 2 1
## 18021 21 31 2 1
## 18022 22 31 2 1
## 18023 23 31 2 1
## 18024 24 31 2 1
## 18025 25 31 2 1
## 18026 26 31 2 1
## 18027 27 31 2 1
## 18028 28 31 2 1
## 18029 29 31 2 1
## 18030 30 31 2 1
## 18031 31 31 2 1
## 18032 32 31 2 1
## 18033 33 31 2 1
## 18034 34 31 2 1
## 18035 35 31 2 1
## 18036 36 31 2 1
## 18037 37 31 2 1
## 18038 38 31 2 1
## 18039 39 31 2 1
## 18040 40 31 2 1
## 18041 41 31 2 1
## 18042 42 31 2 1
## 18043 43 31 2 1
## 18044 44 31 2 1
## 18045 45 31 2 1
## 18046 46 31 2 1
## 18047 47 31 2 1
## 18048 48 31 2 1
## 18049 49 31 2 1
## 18050 50 31 2 1
## 18051 51 31 2 1
## 18052 52 31 2 1
## 18053 53 31 2 1
## 18054 54 31 2 1
## 18055 55 31 2 1
## 18056 56 31 2 1
## 18057 57 31 2 1
## 18058 58 31 2 1
## 18059 59 31 2 1
## 18060 60 31 2 1
## 18061 61 31 2 1
## 18062 62 31 2 1
## 18063 63 31 2 1
## 18064 64 31 2 1
## 18065 65 31 2 1
## 18066 66 31 2 1
## 18067 67 31 2 1
## 18068 68 31 2 1
## 18069 69 31 2 1
## 18070 70 31 2 1
## 18071 71 31 2 1
## 18072 72 31 2 1
## 18073 73 31 2 1
## 18074 74 31 2 1
## 18075 75 31 2 1
## 18076 76 31 2 1
## 18077 77 31 2 1
## 18078 78 31 2 1
## 18079 79 31 2 1
## 18080 80 31 2 1
## 18081 81 31 2 1
## 18082 82 31 2 1
## 18083 83 31 2 1
## 18084 84 31 2 1
## 18085 85 31 2 1
## 18086 86 31 2 1
## 18087 87 31 2 1
## 18088 88 31 2 1
## 18089 89 31 2 1
## 18090 90 31 2 1
## 18091 91 31 2 1
## 18092 92 31 2 1
## 18093 93 31 2 1
## 18094 94 31 2 1
## 18095 95 31 2 1
## 18096 96 31 2 1
## 18097 97 31 2 1
## 18098 98 31 2 1
## 18099 99 31 2 1
## 18100 100 31 2 1
## 18101 101 31 2 1
## 18102 102 31 2 1
## 18103 103 31 2 1
## 18104 104 31 2 1
## 18105 105 31 2 1
## 18106 106 31 2 1
## 18107 107 31 2 1
## 18108 108 31 2 1
## 18109 109 31 2 1
## 18110 110 31 2 1
## 18111 111 31 2 1
## 18112 112 31 2 1
## 18113 113 31 2 1
## 18114 114 31 2 1
## 18115 115 31 2 1
## 18116 116 31 2 1
## 18117 117 31 2 1
## 18118 118 31 2 1
## 18119 119 31 2 1
## 18120 120 31 2 1
## 18121 1 32 2 1
## 18122 2 32 2 1
## 18123 3 32 2 1
## 18124 4 32 2 1
## 18125 5 32 2 1
## 18126 6 32 2 1
## 18127 7 32 2 1
## 18128 8 32 2 1
## 18129 9 32 2 1
## 18130 10 32 2 1
## 18131 11 32 2 1
## 18132 12 32 2 1
## 18133 13 32 2 1
## 18134 14 32 2 1
## 18135 15 32 2 1
## 18136 16 32 2 1
## 18137 17 32 2 1
## 18138 18 32 2 1
## 18139 19 32 2 1
## 18140 20 32 2 1
## 18141 21 32 2 1
## 18142 22 32 2 1
## 18143 23 32 2 1
## 18144 24 32 2 1
## 18145 25 32 2 1
## 18146 26 32 2 1
## 18147 27 32 2 1
## 18148 28 32 2 1
## 18149 29 32 2 1
## 18150 30 32 2 1
## 18151 31 32 2 1
## 18152 32 32 2 1
## 18153 33 32 2 1
## 18154 34 32 2 1
## 18155 35 32 2 1
## 18156 36 32 2 1
## 18157 37 32 2 1
## 18158 38 32 2 1
## 18159 39 32 2 1
## 18160 40 32 2 1
## 18161 41 32 2 1
## 18162 42 32 2 1
## 18163 43 32 2 1
## 18164 44 32 2 1
## 18165 45 32 2 1
## 18166 46 32 2 1
## 18167 47 32 2 1
## 18168 48 32 2 1
## 18169 49 32 2 1
## 18170 50 32 2 1
## 18171 51 32 2 1
## 18172 52 32 2 1
## 18173 53 32 2 1
## 18174 54 32 2 1
## 18175 55 32 2 1
## 18176 56 32 2 1
## 18177 57 32 2 1
## 18178 58 32 2 1
## 18179 59 32 2 1
## 18180 60 32 2 1
## 18181 61 32 2 1
## 18182 62 32 2 1
## 18183 63 32 2 1
## 18184 64 32 2 1
## 18185 65 32 2 1
## 18186 66 32 2 1
## 18187 67 32 2 1
## 18188 68 32 2 1
## 18189 69 32 2 1
## 18190 70 32 2 1
## 18191 71 32 2 1
## 18192 72 32 2 1
## 18193 73 32 2 1
## 18194 74 32 2 1
## 18195 75 32 2 1
## 18196 76 32 2 1
## 18197 77 32 2 1
## 18198 78 32 2 1
## 18199 79 32 2 1
## 18200 80 32 2 1
## 18201 81 32 2 1
## 18202 82 32 2 1
## 18203 83 32 2 1
## 18204 84 32 2 1
## 18205 85 32 2 1
## 18206 86 32 2 1
## 18207 87 32 2 1
## 18208 88 32 2 1
## 18209 89 32 2 1
## 18210 90 32 2 1
## 18211 91 32 2 1
## 18212 92 32 2 1
## 18213 93 32 2 1
## 18214 94 32 2 1
## 18215 95 32 2 1
## 18216 96 32 2 1
## 18217 97 32 2 1
## 18218 98 32 2 1
## 18219 99 32 2 1
## 18220 100 32 2 1
## 18221 101 32 2 1
## 18222 102 32 2 1
## 18223 103 32 2 1
## 18224 104 32 2 1
## 18225 105 32 2 1
## 18226 106 32 2 1
## 18227 107 32 2 1
## 18228 108 32 2 1
## 18229 109 32 2 1
## 18230 110 32 2 1
## 18231 111 32 2 1
## 18232 112 32 2 1
## 18233 113 32 2 1
## 18234 114 32 2 1
## 18235 115 32 2 1
## 18236 116 32 2 1
## 18237 117 32 2 1
## 18238 118 32 2 1
## 18239 119 32 2 1
## 18240 120 32 2 1
## 18241 1 33 2 1
## 18242 2 33 2 1
## 18243 3 33 2 1
## 18244 4 33 2 1
## 18245 5 33 2 1
## 18246 6 33 2 1
## 18247 7 33 2 1
## 18248 8 33 2 1
## 18249 9 33 2 1
## 18250 10 33 2 1
## 18251 11 33 2 1
## 18252 12 33 2 1
## 18253 13 33 2 1
## 18254 14 33 2 1
## 18255 15 33 2 1
## 18256 16 33 2 1
## 18257 17 33 2 1
## 18258 18 33 2 1
## 18259 19 33 2 1
## 18260 20 33 2 1
## 18261 21 33 2 1
## 18262 22 33 2 1
## 18263 23 33 2 1
## 18264 24 33 2 1
## 18265 25 33 2 1
## 18266 26 33 2 1
## 18267 27 33 2 1
## 18268 28 33 2 1
## 18269 29 33 2 1
## 18270 30 33 2 1
## 18271 31 33 2 1
## 18272 32 33 2 1
## 18273 33 33 2 1
## 18274 34 33 2 1
## 18275 35 33 2 1
## 18276 36 33 2 1
## 18277 37 33 2 1
## 18278 38 33 2 1
## 18279 39 33 2 1
## 18280 40 33 2 1
## 18281 41 33 2 1
## 18282 42 33 2 1
## 18283 43 33 2 1
## 18284 44 33 2 1
## 18285 45 33 2 1
## 18286 46 33 2 1
## 18287 47 33 2 1
## 18288 48 33 2 1
## 18289 49 33 2 1
## 18290 50 33 2 1
## 18291 51 33 2 1
## 18292 52 33 2 1
## 18293 53 33 2 1
## 18294 54 33 2 1
## 18295 55 33 2 1
## 18296 56 33 2 1
## 18297 57 33 2 1
## 18298 58 33 2 1
## 18299 59 33 2 1
## 18300 60 33 2 1
## 18301 61 33 2 1
## 18302 62 33 2 1
## 18303 63 33 2 1
## 18304 64 33 2 1
## 18305 65 33 2 1
## 18306 66 33 2 1
## 18307 67 33 2 1
## 18308 68 33 2 1
## 18309 69 33 2 1
## 18310 70 33 2 1
## 18311 71 33 2 1
## 18312 72 33 2 1
## 18313 73 33 2 1
## 18314 74 33 2 1
## 18315 75 33 2 1
## 18316 76 33 2 1
## 18317 77 33 2 1
## 18318 78 33 2 1
## 18319 79 33 2 1
## 18320 80 33 2 1
## 18321 81 33 2 1
## 18322 82 33 2 1
## 18323 83 33 2 1
## 18324 84 33 2 1
## 18325 85 33 2 1
## 18326 86 33 2 1
## 18327 87 33 2 1
## 18328 88 33 2 1
## 18329 89 33 2 1
## 18330 90 33 2 1
## 18331 91 33 2 1
## 18332 92 33 2 1
## 18333 93 33 2 1
## 18334 94 33 2 1
## 18335 95 33 2 1
## 18336 96 33 2 1
## 18337 97 33 2 1
## 18338 98 33 2 1
## 18339 99 33 2 1
## 18340 100 33 2 1
## 18341 101 33 2 1
## 18342 102 33 2 1
## 18343 103 33 2 1
## 18344 104 33 2 1
## 18345 105 33 2 1
## 18346 106 33 2 1
## 18347 107 33 2 1
## 18348 108 33 2 1
## 18349 109 33 2 1
## 18350 110 33 2 1
## 18351 111 33 2 1
## 18352 112 33 2 1
## 18353 113 33 2 1
## 18354 114 33 2 1
## 18355 115 33 2 1
## 18356 116 33 2 1
## 18357 117 33 2 1
## 18358 118 33 2 1
## 18359 119 33 2 1
## 18360 120 33 2 1
## 18361 1 34 2 1
## 18362 2 34 2 1
## 18363 3 34 2 1
## 18364 4 34 2 1
## 18365 5 34 2 1
## 18366 6 34 2 1
## 18367 7 34 2 1
## 18368 8 34 2 1
## 18369 9 34 2 1
## 18370 10 34 2 1
## 18371 11 34 2 1
## 18372 12 34 2 1
## 18373 13 34 2 1
## 18374 14 34 2 1
## 18375 15 34 2 1
## 18376 16 34 2 1
## 18377 17 34 2 1
## 18378 18 34 2 1
## 18379 19 34 2 1
## 18380 20 34 2 1
## 18381 21 34 2 1
## 18382 22 34 2 1
## 18383 23 34 2 1
## 18384 24 34 2 1
## 18385 25 34 2 1
## 18386 26 34 2 1
## 18387 27 34 2 1
## 18388 28 34 2 1
## 18389 29 34 2 1
## 18390 30 34 2 1
## 18391 31 34 2 1
## 18392 32 34 2 1
## 18393 33 34 2 1
## 18394 34 34 2 1
## 18395 35 34 2 1
## 18396 36 34 2 1
## 18397 37 34 2 1
## 18398 38 34 2 1
## 18399 39 34 2 1
## 18400 40 34 2 1
## 18401 41 34 2 1
## 18402 42 34 2 1
## 18403 43 34 2 1
## 18404 44 34 2 1
## 18405 45 34 2 1
## 18406 46 34 2 1
## 18407 47 34 2 1
## 18408 48 34 2 1
## 18409 49 34 2 1
## 18410 50 34 2 1
## 18411 51 34 2 1
## 18412 52 34 2 1
## 18413 53 34 2 1
## 18414 54 34 2 1
## 18415 55 34 2 1
## 18416 56 34 2 1
## 18417 57 34 2 1
## 18418 58 34 2 1
## 18419 59 34 2 1
## 18420 60 34 2 1
## 18421 61 34 2 1
## 18422 62 34 2 1
## 18423 63 34 2 1
## 18424 64 34 2 1
## 18425 65 34 2 1
## 18426 66 34 2 1
## 18427 67 34 2 1
## 18428 68 34 2 1
## 18429 69 34 2 1
## 18430 70 34 2 1
## 18431 71 34 2 1
## 18432 72 34 2 1
## 18433 73 34 2 1
## 18434 74 34 2 1
## 18435 75 34 2 1
## 18436 76 34 2 1
## 18437 77 34 2 1
## 18438 78 34 2 1
## 18439 79 34 2 1
## 18440 80 34 2 1
## 18441 81 34 2 1
## 18442 82 34 2 1
## 18443 83 34 2 1
## 18444 84 34 2 1
## 18445 85 34 2 1
## 18446 86 34 2 1
## 18447 87 34 2 1
## 18448 88 34 2 1
## 18449 89 34 2 1
## 18450 90 34 2 1
## 18451 91 34 2 1
## 18452 92 34 2 1
## 18453 93 34 2 1
## 18454 94 34 2 1
## 18455 95 34 2 1
## 18456 96 34 2 1
## 18457 97 34 2 1
## 18458 98 34 2 1
## 18459 99 34 2 1
## 18460 100 34 2 1
## 18461 101 34 2 1
## 18462 102 34 2 1
## 18463 103 34 2 1
## 18464 104 34 2 1
## 18465 105 34 2 1
## 18466 106 34 2 1
## 18467 107 34 2 1
## 18468 108 34 2 1
## 18469 109 34 2 1
## 18470 110 34 2 1
## 18471 111 34 2 1
## 18472 112 34 2 1
## 18473 113 34 2 1
## 18474 114 34 2 1
## 18475 115 34 2 1
## 18476 116 34 2 1
## 18477 117 34 2 1
## 18478 118 34 2 1
## 18479 119 34 2 1
## 18480 120 34 2 1
## 18481 1 35 2 1
## 18482 2 35 2 1
## 18483 3 35 2 1
## 18484 4 35 2 1
## 18485 5 35 2 1
## 18486 6 35 2 1
## 18487 7 35 2 1
## 18488 8 35 2 1
## 18489 9 35 2 1
## 18490 10 35 2 1
## 18491 11 35 2 1
## 18492 12 35 2 1
## 18493 13 35 2 1
## 18494 14 35 2 1
## 18495 15 35 2 1
## 18496 16 35 2 1
## 18497 17 35 2 1
## 18498 18 35 2 1
## 18499 19 35 2 1
## 18500 20 35 2 1
## 18501 21 35 2 1
## 18502 22 35 2 1
## 18503 23 35 2 1
## 18504 24 35 2 1
## 18505 25 35 2 1
## 18506 26 35 2 1
## 18507 27 35 2 1
## 18508 28 35 2 1
## 18509 29 35 2 1
## 18510 30 35 2 1
## 18511 31 35 2 1
## 18512 32 35 2 1
## 18513 33 35 2 1
## 18514 34 35 2 1
## 18515 35 35 2 1
## 18516 36 35 2 1
## 18517 37 35 2 1
## 18518 38 35 2 1
## 18519 39 35 2 1
## 18520 40 35 2 1
## 18521 41 35 2 1
## 18522 42 35 2 1
## 18523 43 35 2 1
## 18524 44 35 2 1
## 18525 45 35 2 1
## 18526 46 35 2 1
## 18527 47 35 2 1
## 18528 48 35 2 1
## 18529 49 35 2 1
## 18530 50 35 2 1
## 18531 51 35 2 1
## 18532 52 35 2 1
## 18533 53 35 2 1
## 18534 54 35 2 1
## 18535 55 35 2 1
## 18536 56 35 2 1
## 18537 57 35 2 1
## 18538 58 35 2 1
## 18539 59 35 2 1
## 18540 60 35 2 1
## 18541 61 35 2 1
## 18542 62 35 2 1
## 18543 63 35 2 1
## 18544 64 35 2 1
## 18545 65 35 2 1
## 18546 66 35 2 1
## 18547 67 35 2 1
## 18548 68 35 2 1
## 18549 69 35 2 1
## 18550 70 35 2 1
## 18551 71 35 2 1
## 18552 72 35 2 1
## 18553 73 35 2 1
## 18554 74 35 2 1
## 18555 75 35 2 1
## 18556 76 35 2 1
## 18557 77 35 2 1
## 18558 78 35 2 1
## 18559 79 35 2 1
## 18560 80 35 2 1
## 18561 81 35 2 1
## 18562 82 35 2 1
## 18563 83 35 2 1
## 18564 84 35 2 1
## 18565 85 35 2 1
## 18566 86 35 2 1
## 18567 87 35 2 1
## 18568 88 35 2 1
## 18569 89 35 2 1
## 18570 90 35 2 1
## 18571 91 35 2 1
## 18572 92 35 2 1
## 18573 93 35 2 1
## 18574 94 35 2 1
## 18575 95 35 2 1
## 18576 96 35 2 1
## 18577 97 35 2 1
## 18578 98 35 2 1
## 18579 99 35 2 1
## 18580 100 35 2 1
## 18581 101 35 2 1
## 18582 102 35 2 1
## 18583 103 35 2 1
## 18584 104 35 2 1
## 18585 105 35 2 1
## 18586 106 35 2 1
## 18587 107 35 2 1
## 18588 108 35 2 1
## 18589 109 35 2 1
## 18590 110 35 2 1
## 18591 111 35 2 1
## 18592 112 35 2 1
## 18593 113 35 2 1
## 18594 114 35 2 1
## 18595 115 35 2 1
## 18596 116 35 2 1
## 18597 117 35 2 1
## 18598 118 35 2 1
## 18599 119 35 2 1
## 18600 120 35 2 1
## 18601 1 36 2 1
## 18602 2 36 2 1
## 18603 3 36 2 1
## 18604 4 36 2 1
## 18605 5 36 2 1
## 18606 6 36 2 1
## 18607 7 36 2 1
## 18608 8 36 2 1
## 18609 9 36 2 1
## 18610 10 36 2 1
## 18611 11 36 2 1
## 18612 12 36 2 1
## 18613 13 36 2 1
## 18614 14 36 2 1
## 18615 15 36 2 1
## 18616 16 36 2 1
## 18617 17 36 2 1
## 18618 18 36 2 1
## 18619 19 36 2 1
## 18620 20 36 2 1
## 18621 21 36 2 1
## 18622 22 36 2 1
## 18623 23 36 2 1
## 18624 24 36 2 1
## 18625 25 36 2 1
## 18626 26 36 2 1
## 18627 27 36 2 1
## 18628 28 36 2 1
## 18629 29 36 2 1
## 18630 30 36 2 1
## 18631 31 36 2 1
## 18632 32 36 2 1
## 18633 33 36 2 1
## 18634 34 36 2 1
## 18635 35 36 2 1
## 18636 36 36 2 1
## 18637 37 36 2 1
## 18638 38 36 2 1
## 18639 39 36 2 1
## 18640 40 36 2 1
## 18641 41 36 2 1
## 18642 42 36 2 1
## 18643 43 36 2 1
## 18644 44 36 2 1
## 18645 45 36 2 1
## 18646 46 36 2 1
## 18647 47 36 2 1
## 18648 48 36 2 1
## 18649 49 36 2 1
## 18650 50 36 2 1
## 18651 51 36 2 1
## 18652 52 36 2 1
## 18653 53 36 2 1
## 18654 54 36 2 1
## 18655 55 36 2 1
## 18656 56 36 2 1
## 18657 57 36 2 1
## 18658 58 36 2 1
## 18659 59 36 2 1
## 18660 60 36 2 1
## 18661 61 36 2 1
## 18662 62 36 2 1
## 18663 63 36 2 1
## 18664 64 36 2 1
## 18665 65 36 2 1
## 18666 66 36 2 1
## 18667 67 36 2 1
## 18668 68 36 2 1
## 18669 69 36 2 1
## 18670 70 36 2 1
## 18671 71 36 2 1
## 18672 72 36 2 1
## 18673 73 36 2 1
## 18674 74 36 2 1
## 18675 75 36 2 1
## 18676 76 36 2 1
## 18677 77 36 2 1
## 18678 78 36 2 1
## 18679 79 36 2 1
## 18680 80 36 2 1
## 18681 81 36 2 1
## 18682 82 36 2 1
## 18683 83 36 2 1
## 18684 84 36 2 1
## 18685 85 36 2 1
## 18686 86 36 2 1
## 18687 87 36 2 1
## 18688 88 36 2 1
## 18689 89 36 2 1
## 18690 90 36 2 1
## 18691 91 36 2 1
## 18692 92 36 2 1
## 18693 93 36 2 1
## 18694 94 36 2 1
## 18695 95 36 2 1
## 18696 96 36 2 1
## 18697 97 36 2 1
## 18698 98 36 2 1
## 18699 99 36 2 1
## 18700 100 36 2 1
## 18701 101 36 2 1
## 18702 102 36 2 1
## 18703 103 36 2 1
## 18704 104 36 2 1
## 18705 105 36 2 1
## 18706 106 36 2 1
## 18707 107 36 2 1
## 18708 108 36 2 1
## 18709 109 36 2 1
## 18710 110 36 2 1
## 18711 111 36 2 1
## 18712 112 36 2 1
## 18713 113 36 2 1
## 18714 114 36 2 1
## 18715 115 36 2 1
## 18716 116 36 2 1
## 18717 117 36 2 1
## 18718 118 36 2 1
## 18719 119 36 2 1
## 18720 120 36 2 1
## 18721 1 37 2 1
## 18722 2 37 2 1
## 18723 3 37 2 1
## 18724 4 37 2 1
## 18725 5 37 2 1
## 18726 6 37 2 1
## 18727 7 37 2 1
## 18728 8 37 2 1
## 18729 9 37 2 1
## 18730 10 37 2 1
## 18731 11 37 2 1
## 18732 12 37 2 1
## 18733 13 37 2 1
## 18734 14 37 2 1
## 18735 15 37 2 1
## 18736 16 37 2 1
## 18737 17 37 2 1
## 18738 18 37 2 1
## 18739 19 37 2 1
## 18740 20 37 2 1
## 18741 21 37 2 1
## 18742 22 37 2 1
## 18743 23 37 2 1
## 18744 24 37 2 1
## 18745 25 37 2 1
## 18746 26 37 2 1
## 18747 27 37 2 1
## 18748 28 37 2 1
## 18749 29 37 2 1
## 18750 30 37 2 1
## 18751 31 37 2 1
## 18752 32 37 2 1
## 18753 33 37 2 1
## 18754 34 37 2 1
## 18755 35 37 2 1
## 18756 36 37 2 1
## 18757 37 37 2 1
## 18758 38 37 2 1
## 18759 39 37 2 1
## 18760 40 37 2 1
## 18761 41 37 2 1
## 18762 42 37 2 1
## 18763 43 37 2 1
## 18764 44 37 2 1
## 18765 45 37 2 1
## 18766 46 37 2 1
## 18767 47 37 2 1
## 18768 48 37 2 1
## 18769 49 37 2 1
## 18770 50 37 2 1
## 18771 51 37 2 1
## 18772 52 37 2 1
## 18773 53 37 2 1
## 18774 54 37 2 1
## 18775 55 37 2 1
## 18776 56 37 2 1
## 18777 57 37 2 1
## 18778 58 37 2 1
## 18779 59 37 2 1
## 18780 60 37 2 1
## 18781 61 37 2 1
## 18782 62 37 2 1
## 18783 63 37 2 1
## 18784 64 37 2 1
## 18785 65 37 2 1
## 18786 66 37 2 1
## 18787 67 37 2 1
## 18788 68 37 2 1
## 18789 69 37 2 1
## 18790 70 37 2 1
## 18791 71 37 2 1
## 18792 72 37 2 1
## 18793 73 37 2 1
## 18794 74 37 2 1
## 18795 75 37 2 1
## 18796 76 37 2 1
## 18797 77 37 2 1
## 18798 78 37 2 1
## 18799 79 37 2 1
## 18800 80 37 2 1
## 18801 81 37 2 1
## 18802 82 37 2 1
## 18803 83 37 2 1
## 18804 84 37 2 1
## 18805 85 37 2 1
## 18806 86 37 2 1
## 18807 87 37 2 1
## 18808 88 37 2 1
## 18809 89 37 2 1
## 18810 90 37 2 1
## 18811 91 37 2 1
## 18812 92 37 2 1
## 18813 93 37 2 1
## 18814 94 37 2 1
## 18815 95 37 2 1
## 18816 96 37 2 1
## 18817 97 37 2 1
## 18818 98 37 2 1
## 18819 99 37 2 1
## 18820 100 37 2 1
## 18821 101 37 2 1
## 18822 102 37 2 1
## 18823 103 37 2 1
## 18824 104 37 2 1
## 18825 105 37 2 1
## 18826 106 37 2 1
## 18827 107 37 2 1
## 18828 108 37 2 1
## 18829 109 37 2 1
## 18830 110 37 2 1
## 18831 111 37 2 1
## 18832 112 37 2 1
## 18833 113 37 2 1
## 18834 114 37 2 1
## 18835 115 37 2 1
## 18836 116 37 2 1
## 18837 117 37 2 1
## 18838 118 37 2 1
## 18839 119 37 2 1
## 18840 120 37 2 1
## 18841 1 38 2 1
## 18842 2 38 2 1
## 18843 3 38 2 1
## 18844 4 38 2 1
## 18845 5 38 2 1
## 18846 6 38 2 1
## 18847 7 38 2 1
## 18848 8 38 2 1
## 18849 9 38 2 1
## 18850 10 38 2 1
## 18851 11 38 2 1
## 18852 12 38 2 1
## 18853 13 38 2 1
## 18854 14 38 2 1
## 18855 15 38 2 1
## 18856 16 38 2 1
## 18857 17 38 2 1
## 18858 18 38 2 1
## 18859 19 38 2 1
## 18860 20 38 2 1
## 18861 21 38 2 1
## 18862 22 38 2 1
## 18863 23 38 2 1
## 18864 24 38 2 1
## 18865 25 38 2 1
## 18866 26 38 2 1
## 18867 27 38 2 1
## 18868 28 38 2 1
## 18869 29 38 2 1
## 18870 30 38 2 1
## 18871 31 38 2 1
## 18872 32 38 2 1
## 18873 33 38 2 1
## 18874 34 38 2 1
## 18875 35 38 2 1
## 18876 36 38 2 1
## 18877 37 38 2 1
## 18878 38 38 2 1
## 18879 39 38 2 1
## 18880 40 38 2 1
## 18881 41 38 2 1
## 18882 42 38 2 1
## 18883 43 38 2 1
## 18884 44 38 2 1
## 18885 45 38 2 1
## 18886 46 38 2 1
## 18887 47 38 2 1
## 18888 48 38 2 1
## 18889 49 38 2 1
## 18890 50 38 2 1
## 18891 51 38 2 1
## 18892 52 38 2 1
## 18893 53 38 2 1
## 18894 54 38 2 1
## 18895 55 38 2 1
## 18896 56 38 2 1
## 18897 57 38 2 1
## 18898 58 38 2 1
## 18899 59 38 2 1
## 18900 60 38 2 1
## 18901 61 38 2 1
## 18902 62 38 2 1
## 18903 63 38 2 1
## 18904 64 38 2 1
## 18905 65 38 2 1
## 18906 66 38 2 1
## 18907 67 38 2 1
## 18908 68 38 2 1
## 18909 69 38 2 1
## 18910 70 38 2 1
## 18911 71 38 2 1
## 18912 72 38 2 1
## 18913 73 38 2 1
## 18914 74 38 2 1
## 18915 75 38 2 1
## 18916 76 38 2 1
## 18917 77 38 2 1
## 18918 78 38 2 1
## 18919 79 38 2 1
## 18920 80 38 2 1
## 18921 81 38 2 1
## 18922 82 38 2 1
## 18923 83 38 2 1
## 18924 84 38 2 1
## 18925 85 38 2 1
## 18926 86 38 2 1
## 18927 87 38 2 1
## 18928 88 38 2 1
## 18929 89 38 2 1
## 18930 90 38 2 1
## 18931 91 38 2 1
## 18932 92 38 2 1
## 18933 93 38 2 1
## 18934 94 38 2 1
## 18935 95 38 2 1
## 18936 96 38 2 1
## 18937 97 38 2 1
## 18938 98 38 2 1
## 18939 99 38 2 1
## 18940 100 38 2 1
## 18941 101 38 2 1
## 18942 102 38 2 1
## 18943 103 38 2 1
## 18944 104 38 2 1
## 18945 105 38 2 1
## 18946 106 38 2 1
## 18947 107 38 2 1
## 18948 108 38 2 1
## 18949 109 38 2 1
## 18950 110 38 2 1
## 18951 111 38 2 1
## 18952 112 38 2 1
## 18953 113 38 2 1
## 18954 114 38 2 1
## 18955 115 38 2 1
## 18956 116 38 2 1
## 18957 117 38 2 1
## 18958 118 38 2 1
## 18959 119 38 2 1
## 18960 120 38 2 1
## 18961 1 39 2 1
## 18962 2 39 2 1
## 18963 3 39 2 1
## 18964 4 39 2 1
## 18965 5 39 2 1
## 18966 6 39 2 1
## 18967 7 39 2 1
## 18968 8 39 2 1
## 18969 9 39 2 1
## 18970 10 39 2 1
## 18971 11 39 2 1
## 18972 12 39 2 1
## 18973 13 39 2 1
## 18974 14 39 2 1
## 18975 15 39 2 1
## 18976 16 39 2 1
## 18977 17 39 2 1
## 18978 18 39 2 1
## 18979 19 39 2 1
## 18980 20 39 2 1
## 18981 21 39 2 1
## 18982 22 39 2 1
## 18983 23 39 2 1
## 18984 24 39 2 1
## 18985 25 39 2 1
## 18986 26 39 2 1
## 18987 27 39 2 1
## 18988 28 39 2 1
## 18989 29 39 2 1
## 18990 30 39 2 1
## 18991 31 39 2 1
## 18992 32 39 2 1
## 18993 33 39 2 1
## 18994 34 39 2 1
## 18995 35 39 2 1
## 18996 36 39 2 1
## 18997 37 39 2 1
## 18998 38 39 2 1
## 18999 39 39 2 1
## 19000 40 39 2 1
## 19001 41 39 2 1
## 19002 42 39 2 1
## 19003 43 39 2 1
## 19004 44 39 2 1
## 19005 45 39 2 1
## 19006 46 39 2 1
## 19007 47 39 2 1
## 19008 48 39 2 1
## 19009 49 39 2 1
## 19010 50 39 2 1
## 19011 51 39 2 1
## 19012 52 39 2 1
## 19013 53 39 2 1
## 19014 54 39 2 1
## 19015 55 39 2 1
## 19016 56 39 2 1
## 19017 57 39 2 1
## 19018 58 39 2 1
## 19019 59 39 2 1
## 19020 60 39 2 1
## 19021 61 39 2 1
## 19022 62 39 2 1
## 19023 63 39 2 1
## 19024 64 39 2 1
## 19025 65 39 2 1
## 19026 66 39 2 1
## 19027 67 39 2 1
## 19028 68 39 2 1
## 19029 69 39 2 1
## 19030 70 39 2 1
## 19031 71 39 2 1
## 19032 72 39 2 1
## 19033 73 39 2 1
## 19034 74 39 2 1
## 19035 75 39 2 1
## 19036 76 39 2 1
## 19037 77 39 2 1
## 19038 78 39 2 1
## 19039 79 39 2 1
## 19040 80 39 2 1
## 19041 81 39 2 1
## 19042 82 39 2 1
## 19043 83 39 2 1
## 19044 84 39 2 1
## 19045 85 39 2 1
## 19046 86 39 2 1
## 19047 87 39 2 1
## 19048 88 39 2 1
## 19049 89 39 2 1
## 19050 90 39 2 1
## 19051 91 39 2 1
## 19052 92 39 2 1
## 19053 93 39 2 1
## 19054 94 39 2 1
## 19055 95 39 2 1
## 19056 96 39 2 1
## 19057 97 39 2 1
## 19058 98 39 2 1
## 19059 99 39 2 1
## 19060 100 39 2 1
## 19061 101 39 2 1
## 19062 102 39 2 1
## 19063 103 39 2 1
## 19064 104 39 2 1
## 19065 105 39 2 1
## 19066 106 39 2 1
## 19067 107 39 2 1
## 19068 108 39 2 1
## 19069 109 39 2 1
## 19070 110 39 2 1
## 19071 111 39 2 1
## 19072 112 39 2 1
## 19073 113 39 2 1
## 19074 114 39 2 1
## 19075 115 39 2 1
## 19076 116 39 2 1
## 19077 117 39 2 1
## 19078 118 39 2 1
## 19079 119 39 2 1
## 19080 120 39 2 1
## 19081 1 40 2 1
## 19082 2 40 2 1
## 19083 3 40 2 1
## 19084 4 40 2 1
## 19085 5 40 2 1
## 19086 6 40 2 1
## 19087 7 40 2 1
## 19088 8 40 2 1
## 19089 9 40 2 1
## 19090 10 40 2 1
## 19091 11 40 2 1
## 19092 12 40 2 1
## 19093 13 40 2 1
## 19094 14 40 2 1
## 19095 15 40 2 1
## 19096 16 40 2 1
## 19097 17 40 2 1
## 19098 18 40 2 1
## 19099 19 40 2 1
## 19100 20 40 2 1
## 19101 21 40 2 1
## 19102 22 40 2 1
## 19103 23 40 2 1
## 19104 24 40 2 1
## 19105 25 40 2 1
## 19106 26 40 2 1
## 19107 27 40 2 1
## 19108 28 40 2 1
## 19109 29 40 2 1
## 19110 30 40 2 1
## 19111 31 40 2 1
## 19112 32 40 2 1
## 19113 33 40 2 1
## 19114 34 40 2 1
## 19115 35 40 2 1
## 19116 36 40 2 1
## 19117 37 40 2 1
## 19118 38 40 2 1
## 19119 39 40 2 1
## 19120 40 40 2 1
## 19121 41 40 2 1
## 19122 42 40 2 1
## 19123 43 40 2 1
## 19124 44 40 2 1
## 19125 45 40 2 1
## 19126 46 40 2 1
## 19127 47 40 2 1
## 19128 48 40 2 1
## 19129 49 40 2 1
## 19130 50 40 2 1
## 19131 51 40 2 1
## 19132 52 40 2 1
## 19133 53 40 2 1
## 19134 54 40 2 1
## 19135 55 40 2 1
## 19136 56 40 2 1
## 19137 57 40 2 1
## 19138 58 40 2 1
## 19139 59 40 2 1
## 19140 60 40 2 1
## 19141 61 40 2 1
## 19142 62 40 2 1
## 19143 63 40 2 1
## 19144 64 40 2 1
## 19145 65 40 2 1
## 19146 66 40 2 1
## 19147 67 40 2 1
## 19148 68 40 2 1
## 19149 69 40 2 1
## 19150 70 40 2 1
## 19151 71 40 2 1
## 19152 72 40 2 1
## 19153 73 40 2 1
## 19154 74 40 2 1
## 19155 75 40 2 1
## 19156 76 40 2 1
## 19157 77 40 2 1
## 19158 78 40 2 1
## 19159 79 40 2 1
## 19160 80 40 2 1
## 19161 81 40 2 1
## 19162 82 40 2 1
## 19163 83 40 2 1
## 19164 84 40 2 1
## 19165 85 40 2 1
## 19166 86 40 2 1
## 19167 87 40 2 1
## 19168 88 40 2 1
## 19169 89 40 2 1
## 19170 90 40 2 1
## 19171 91 40 2 1
## 19172 92 40 2 1
## 19173 93 40 2 1
## 19174 94 40 2 1
## 19175 95 40 2 1
## 19176 96 40 2 1
## 19177 97 40 2 1
## 19178 98 40 2 1
## 19179 99 40 2 1
## 19180 100 40 2 1
## 19181 101 40 2 1
## 19182 102 40 2 1
## 19183 103 40 2 1
## 19184 104 40 2 1
## 19185 105 40 2 1
## 19186 106 40 2 1
## 19187 107 40 2 1
## 19188 108 40 2 1
## 19189 109 40 2 1
## 19190 110 40 2 1
## 19191 111 40 2 1
## 19192 112 40 2 1
## 19193 113 40 2 1
## 19194 114 40 2 1
## 19195 115 40 2 1
## 19196 116 40 2 1
## 19197 117 40 2 1
## 19198 118 40 2 1
## 19199 119 40 2 1
## 19200 120 40 2 1
## 19201 1 41 2 1
## 19202 2 41 2 1
## 19203 3 41 2 1
## 19204 4 41 2 1
## 19205 5 41 2 1
## 19206 6 41 2 1
## 19207 7 41 2 1
## 19208 8 41 2 1
## 19209 9 41 2 1
## 19210 10 41 2 1
## 19211 11 41 2 1
## 19212 12 41 2 1
## 19213 13 41 2 1
## 19214 14 41 2 1
## 19215 15 41 2 1
## 19216 16 41 2 1
## 19217 17 41 2 1
## 19218 18 41 2 1
## 19219 19 41 2 1
## 19220 20 41 2 1
## 19221 21 41 2 1
## 19222 22 41 2 1
## 19223 23 41 2 1
## 19224 24 41 2 1
## 19225 25 41 2 1
## 19226 26 41 2 1
## 19227 27 41 2 1
## 19228 28 41 2 1
## 19229 29 41 2 1
## 19230 30 41 2 1
## 19231 31 41 2 1
## 19232 32 41 2 1
## 19233 33 41 2 1
## 19234 34 41 2 1
## 19235 35 41 2 1
## 19236 36 41 2 1
## 19237 37 41 2 1
## 19238 38 41 2 1
## 19239 39 41 2 1
## 19240 40 41 2 1
## 19241 41 41 2 1
## 19242 42 41 2 1
## 19243 43 41 2 1
## 19244 44 41 2 1
## 19245 45 41 2 1
## 19246 46 41 2 1
## 19247 47 41 2 1
## 19248 48 41 2 1
## 19249 49 41 2 1
## 19250 50 41 2 1
## 19251 51 41 2 1
## 19252 52 41 2 1
## 19253 53 41 2 1
## 19254 54 41 2 1
## 19255 55 41 2 1
## 19256 56 41 2 1
## 19257 57 41 2 1
## 19258 58 41 2 1
## 19259 59 41 2 1
## 19260 60 41 2 1
## 19261 61 41 2 1
## 19262 62 41 2 1
## 19263 63 41 2 1
## 19264 64 41 2 1
## 19265 65 41 2 1
## 19266 66 41 2 1
## 19267 67 41 2 1
## 19268 68 41 2 1
## 19269 69 41 2 1
## 19270 70 41 2 1
## 19271 71 41 2 1
## 19272 72 41 2 1
## 19273 73 41 2 1
## 19274 74 41 2 1
## 19275 75 41 2 1
## 19276 76 41 2 1
## 19277 77 41 2 1
## 19278 78 41 2 1
## 19279 79 41 2 1
## 19280 80 41 2 1
## 19281 81 41 2 1
## 19282 82 41 2 1
## 19283 83 41 2 1
## 19284 84 41 2 1
## 19285 85 41 2 1
## 19286 86 41 2 1
## 19287 87 41 2 1
## 19288 88 41 2 1
## 19289 89 41 2 1
## 19290 90 41 2 1
## 19291 91 41 2 1
## 19292 92 41 2 1
## 19293 93 41 2 1
## 19294 94 41 2 1
## 19295 95 41 2 1
## 19296 96 41 2 1
## 19297 97 41 2 1
## 19298 98 41 2 1
## 19299 99 41 2 1
## 19300 100 41 2 1
## 19301 101 41 2 1
## 19302 102 41 2 1
## 19303 103 41 2 1
## 19304 104 41 2 1
## 19305 105 41 2 1
## 19306 106 41 2 1
## 19307 107 41 2 1
## 19308 108 41 2 1
## 19309 109 41 2 1
## 19310 110 41 2 1
## 19311 111 41 2 1
## 19312 112 41 2 1
## 19313 113 41 2 1
## 19314 114 41 2 1
## 19315 115 41 2 1
## 19316 116 41 2 1
## 19317 117 41 2 1
## 19318 118 41 2 1
## 19319 119 41 2 1
## 19320 120 41 2 1
## 19321 1 42 2 1
## 19322 2 42 2 1
## 19323 3 42 2 1
## 19324 4 42 2 1
## 19325 5 42 2 1
## 19326 6 42 2 1
## 19327 7 42 2 1
## 19328 8 42 2 1
## 19329 9 42 2 1
## 19330 10 42 2 1
## 19331 11 42 2 1
## 19332 12 42 2 1
## 19333 13 42 2 1
## 19334 14 42 2 1
## 19335 15 42 2 1
## 19336 16 42 2 1
## 19337 17 42 2 1
## 19338 18 42 2 1
## 19339 19 42 2 1
## 19340 20 42 2 1
## 19341 21 42 2 1
## 19342 22 42 2 1
## 19343 23 42 2 1
## 19344 24 42 2 1
## 19345 25 42 2 1
## 19346 26 42 2 1
## 19347 27 42 2 1
## 19348 28 42 2 1
## 19349 29 42 2 1
## 19350 30 42 2 1
## 19351 31 42 2 1
## 19352 32 42 2 1
## 19353 33 42 2 1
## 19354 34 42 2 1
## 19355 35 42 2 1
## 19356 36 42 2 1
## 19357 37 42 2 1
## 19358 38 42 2 1
## 19359 39 42 2 1
## 19360 40 42 2 1
## 19361 41 42 2 1
## 19362 42 42 2 1
## 19363 43 42 2 1
## 19364 44 42 2 1
## 19365 45 42 2 1
## 19366 46 42 2 1
## 19367 47 42 2 1
## 19368 48 42 2 1
## 19369 49 42 2 1
## 19370 50 42 2 1
## 19371 51 42 2 1
## 19372 52 42 2 1
## 19373 53 42 2 1
## 19374 54 42 2 1
## 19375 55 42 2 1
## 19376 56 42 2 1
## 19377 57 42 2 1
## 19378 58 42 2 1
## 19379 59 42 2 1
## 19380 60 42 2 1
## 19381 61 42 2 1
## 19382 62 42 2 1
## 19383 63 42 2 1
## 19384 64 42 2 1
## 19385 65 42 2 1
## 19386 66 42 2 1
## 19387 67 42 2 1
## 19388 68 42 2 1
## 19389 69 42 2 1
## 19390 70 42 2 1
## 19391 71 42 2 1
## 19392 72 42 2 1
## 19393 73 42 2 1
## 19394 74 42 2 1
## 19395 75 42 2 1
## 19396 76 42 2 1
## 19397 77 42 2 1
## 19398 78 42 2 1
## 19399 79 42 2 1
## 19400 80 42 2 1
## 19401 81 42 2 1
## 19402 82 42 2 1
## 19403 83 42 2 1
## 19404 84 42 2 1
## 19405 85 42 2 1
## 19406 86 42 2 1
## 19407 87 42 2 1
## 19408 88 42 2 1
## 19409 89 42 2 1
## 19410 90 42 2 1
## 19411 91 42 2 1
## 19412 92 42 2 1
## 19413 93 42 2 1
## 19414 94 42 2 1
## 19415 95 42 2 1
## 19416 96 42 2 1
## 19417 97 42 2 1
## 19418 98 42 2 1
## 19419 99 42 2 1
## 19420 100 42 2 1
## 19421 101 42 2 1
## 19422 102 42 2 1
## 19423 103 42 2 1
## 19424 104 42 2 1
## 19425 105 42 2 1
## 19426 106 42 2 1
## 19427 107 42 2 1
## 19428 108 42 2 1
## 19429 109 42 2 1
## 19430 110 42 2 1
## 19431 111 42 2 1
## 19432 112 42 2 1
## 19433 113 42 2 1
## 19434 114 42 2 1
## 19435 115 42 2 1
## 19436 116 42 2 1
## 19437 117 42 2 1
## 19438 118 42 2 1
## 19439 119 42 2 1
## 19440 120 42 2 1
## 19441 1 43 2 1
## 19442 2 43 2 1
## 19443 3 43 2 1
## 19444 4 43 2 1
## 19445 5 43 2 1
## 19446 6 43 2 1
## 19447 7 43 2 1
## 19448 8 43 2 1
## 19449 9 43 2 1
## 19450 10 43 2 1
## 19451 11 43 2 1
## 19452 12 43 2 1
## 19453 13 43 2 1
## 19454 14 43 2 1
## 19455 15 43 2 1
## 19456 16 43 2 1
## 19457 17 43 2 1
## 19458 18 43 2 1
## 19459 19 43 2 1
## 19460 20 43 2 1
## 19461 21 43 2 1
## 19462 22 43 2 1
## 19463 23 43 2 1
## 19464 24 43 2 1
## 19465 25 43 2 1
## 19466 26 43 2 1
## 19467 27 43 2 1
## 19468 28 43 2 1
## 19469 29 43 2 1
## 19470 30 43 2 1
## 19471 31 43 2 1
## 19472 32 43 2 1
## 19473 33 43 2 1
## 19474 34 43 2 1
## 19475 35 43 2 1
## 19476 36 43 2 1
## 19477 37 43 2 1
## 19478 38 43 2 1
## 19479 39 43 2 1
## 19480 40 43 2 1
## 19481 41 43 2 1
## 19482 42 43 2 1
## 19483 43 43 2 1
## 19484 44 43 2 1
## 19485 45 43 2 1
## 19486 46 43 2 1
## 19487 47 43 2 1
## 19488 48 43 2 1
## 19489 49 43 2 1
## 19490 50 43 2 1
## 19491 51 43 2 1
## 19492 52 43 2 1
## 19493 53 43 2 1
## 19494 54 43 2 1
## 19495 55 43 2 1
## 19496 56 43 2 1
## 19497 57 43 2 1
## 19498 58 43 2 1
## 19499 59 43 2 1
## 19500 60 43 2 1
## 19501 61 43 2 1
## 19502 62 43 2 1
## 19503 63 43 2 1
## 19504 64 43 2 1
## 19505 65 43 2 1
## 19506 66 43 2 1
## 19507 67 43 2 1
## 19508 68 43 2 1
## 19509 69 43 2 1
## 19510 70 43 2 1
## 19511 71 43 2 1
## 19512 72 43 2 1
## 19513 73 43 2 1
## 19514 74 43 2 1
## 19515 75 43 2 1
## 19516 76 43 2 1
## 19517 77 43 2 1
## 19518 78 43 2 1
## 19519 79 43 2 1
## 19520 80 43 2 1
## 19521 81 43 2 1
## 19522 82 43 2 1
## 19523 83 43 2 1
## 19524 84 43 2 1
## 19525 85 43 2 1
## 19526 86 43 2 1
## 19527 87 43 2 1
## 19528 88 43 2 1
## 19529 89 43 2 1
## 19530 90 43 2 1
## 19531 91 43 2 1
## 19532 92 43 2 1
## 19533 93 43 2 1
## 19534 94 43 2 1
## 19535 95 43 2 1
## 19536 96 43 2 1
## 19537 97 43 2 1
## 19538 98 43 2 1
## 19539 99 43 2 1
## 19540 100 43 2 1
## 19541 101 43 2 1
## 19542 102 43 2 1
## 19543 103 43 2 1
## 19544 104 43 2 1
## 19545 105 43 2 1
## 19546 106 43 2 1
## 19547 107 43 2 1
## 19548 108 43 2 1
## 19549 109 43 2 1
## 19550 110 43 2 1
## 19551 111 43 2 1
## 19552 112 43 2 1
## 19553 113 43 2 1
## 19554 114 43 2 1
## 19555 115 43 2 1
## 19556 116 43 2 1
## 19557 117 43 2 1
## 19558 118 43 2 1
## 19559 119 43 2 1
## 19560 120 43 2 1
## 19561 1 44 2 1
## 19562 2 44 2 1
## 19563 3 44 2 1
## 19564 4 44 2 1
## 19565 5 44 2 1
## 19566 6 44 2 1
## 19567 7 44 2 1
## 19568 8 44 2 1
## 19569 9 44 2 1
## 19570 10 44 2 1
## 19571 11 44 2 1
## 19572 12 44 2 1
## 19573 13 44 2 1
## 19574 14 44 2 1
## 19575 15 44 2 1
## 19576 16 44 2 1
## 19577 17 44 2 1
## 19578 18 44 2 1
## 19579 19 44 2 1
## 19580 20 44 2 1
## 19581 21 44 2 1
## 19582 22 44 2 1
## 19583 23 44 2 1
## 19584 24 44 2 1
## 19585 25 44 2 1
## 19586 26 44 2 1
## 19587 27 44 2 1
## 19588 28 44 2 1
## 19589 29 44 2 1
## 19590 30 44 2 1
## 19591 31 44 2 1
## 19592 32 44 2 1
## 19593 33 44 2 1
## 19594 34 44 2 1
## 19595 35 44 2 1
## 19596 36 44 2 1
## 19597 37 44 2 1
## 19598 38 44 2 1
## 19599 39 44 2 1
## 19600 40 44 2 1
## 19601 41 44 2 1
## 19602 42 44 2 1
## 19603 43 44 2 1
## 19604 44 44 2 1
## 19605 45 44 2 1
## 19606 46 44 2 1
## 19607 47 44 2 1
## 19608 48 44 2 1
## 19609 49 44 2 1
## 19610 50 44 2 1
## 19611 51 44 2 1
## 19612 52 44 2 1
## 19613 53 44 2 1
## 19614 54 44 2 1
## 19615 55 44 2 1
## 19616 56 44 2 1
## 19617 57 44 2 1
## 19618 58 44 2 1
## 19619 59 44 2 1
## 19620 60 44 2 1
## 19621 61 44 2 1
## 19622 62 44 2 1
## 19623 63 44 2 1
## 19624 64 44 2 1
## 19625 65 44 2 1
## 19626 66 44 2 1
## 19627 67 44 2 1
## 19628 68 44 2 1
## 19629 69 44 2 1
## 19630 70 44 2 1
## 19631 71 44 2 1
## 19632 72 44 2 1
## 19633 73 44 2 1
## 19634 74 44 2 1
## 19635 75 44 2 1
## 19636 76 44 2 1
## 19637 77 44 2 1
## 19638 78 44 2 1
## 19639 79 44 2 1
## 19640 80 44 2 1
## 19641 81 44 2 1
## 19642 82 44 2 1
## 19643 83 44 2 1
## 19644 84 44 2 1
## 19645 85 44 2 1
## 19646 86 44 2 1
## 19647 87 44 2 1
## 19648 88 44 2 1
## 19649 89 44 2 1
## 19650 90 44 2 1
## 19651 91 44 2 1
## 19652 92 44 2 1
## 19653 93 44 2 1
## 19654 94 44 2 1
## 19655 95 44 2 1
## 19656 96 44 2 1
## 19657 97 44 2 1
## 19658 98 44 2 1
## 19659 99 44 2 1
## 19660 100 44 2 1
## 19661 101 44 2 1
## 19662 102 44 2 1
## 19663 103 44 2 1
## 19664 104 44 2 1
## 19665 105 44 2 1
## 19666 106 44 2 1
## 19667 107 44 2 1
## 19668 108 44 2 1
## 19669 109 44 2 1
## 19670 110 44 2 1
## 19671 111 44 2 1
## 19672 112 44 2 1
## 19673 113 44 2 1
## 19674 114 44 2 1
## 19675 115 44 2 1
## 19676 116 44 2 1
## 19677 117 44 2 1
## 19678 118 44 2 1
## 19679 119 44 2 1
## 19680 120 44 2 1
## 19681 1 45 2 1
## 19682 2 45 2 1
## 19683 3 45 2 1
## 19684 4 45 2 1
## 19685 5 45 2 1
## 19686 6 45 2 1
## 19687 7 45 2 1
## 19688 8 45 2 1
## 19689 9 45 2 1
## 19690 10 45 2 1
## 19691 11 45 2 1
## 19692 12 45 2 1
## 19693 13 45 2 1
## 19694 14 45 2 1
## 19695 15 45 2 1
## 19696 16 45 2 1
## 19697 17 45 2 1
## 19698 18 45 2 1
## 19699 19 45 2 1
## 19700 20 45 2 1
## 19701 21 45 2 1
## 19702 22 45 2 1
## 19703 23 45 2 1
## 19704 24 45 2 1
## 19705 25 45 2 1
## 19706 26 45 2 1
## 19707 27 45 2 1
## 19708 28 45 2 1
## 19709 29 45 2 1
## 19710 30 45 2 1
## 19711 31 45 2 1
## 19712 32 45 2 1
## 19713 33 45 2 1
## 19714 34 45 2 1
## 19715 35 45 2 1
## 19716 36 45 2 1
## 19717 37 45 2 1
## 19718 38 45 2 1
## 19719 39 45 2 1
## 19720 40 45 2 1
## 19721 41 45 2 1
## 19722 42 45 2 1
## 19723 43 45 2 1
## 19724 44 45 2 1
## 19725 45 45 2 1
## 19726 46 45 2 1
## 19727 47 45 2 1
## 19728 48 45 2 1
## 19729 49 45 2 1
## 19730 50 45 2 1
## 19731 51 45 2 1
## 19732 52 45 2 1
## 19733 53 45 2 1
## 19734 54 45 2 1
## 19735 55 45 2 1
## 19736 56 45 2 1
## 19737 57 45 2 1
## 19738 58 45 2 1
## 19739 59 45 2 1
## 19740 60 45 2 1
## 19741 61 45 2 1
## 19742 62 45 2 1
## 19743 63 45 2 1
## 19744 64 45 2 1
## 19745 65 45 2 1
## 19746 66 45 2 1
## 19747 67 45 2 1
## 19748 68 45 2 1
## 19749 69 45 2 1
## 19750 70 45 2 1
## 19751 71 45 2 1
## 19752 72 45 2 1
## 19753 73 45 2 1
## 19754 74 45 2 1
## 19755 75 45 2 1
## 19756 76 45 2 1
## 19757 77 45 2 1
## 19758 78 45 2 1
## 19759 79 45 2 1
## 19760 80 45 2 1
## 19761 81 45 2 1
## 19762 82 45 2 1
## 19763 83 45 2 1
## 19764 84 45 2 1
## 19765 85 45 2 1
## 19766 86 45 2 1
## 19767 87 45 2 1
## 19768 88 45 2 1
## 19769 89 45 2 1
## 19770 90 45 2 1
## 19771 91 45 2 1
## 19772 92 45 2 1
## 19773 93 45 2 1
## 19774 94 45 2 1
## 19775 95 45 2 1
## 19776 96 45 2 1
## 19777 97 45 2 1
## 19778 98 45 2 1
## 19779 99 45 2 1
## 19780 100 45 2 1
## 19781 101 45 2 1
## 19782 102 45 2 1
## 19783 103 45 2 1
## 19784 104 45 2 1
## 19785 105 45 2 1
## 19786 106 45 2 1
## 19787 107 45 2 1
## 19788 108 45 2 1
## 19789 109 45 2 1
## 19790 110 45 2 1
## 19791 111 45 2 1
## 19792 112 45 2 1
## 19793 113 45 2 1
## 19794 114 45 2 1
## 19795 115 45 2 1
## 19796 116 45 2 1
## 19797 117 45 2 1
## 19798 118 45 2 1
## 19799 119 45 2 1
## 19800 120 45 2 1
## 19801 1 46 2 1
## 19802 2 46 2 1
## 19803 3 46 2 1
## 19804 4 46 2 1
## 19805 5 46 2 1
## 19806 6 46 2 1
## 19807 7 46 2 1
## 19808 8 46 2 1
## 19809 9 46 2 1
## 19810 10 46 2 1
## 19811 11 46 2 1
## 19812 12 46 2 1
## 19813 13 46 2 1
## 19814 14 46 2 1
## 19815 15 46 2 1
## 19816 16 46 2 1
## 19817 17 46 2 1
## 19818 18 46 2 1
## 19819 19 46 2 1
## 19820 20 46 2 1
## 19821 21 46 2 1
## 19822 22 46 2 1
## 19823 23 46 2 1
## 19824 24 46 2 1
## 19825 25 46 2 1
## 19826 26 46 2 1
## 19827 27 46 2 1
## 19828 28 46 2 1
## 19829 29 46 2 1
## 19830 30 46 2 1
## 19831 31 46 2 1
## 19832 32 46 2 1
## 19833 33 46 2 1
## 19834 34 46 2 1
## 19835 35 46 2 1
## 19836 36 46 2 1
## 19837 37 46 2 1
## 19838 38 46 2 1
## 19839 39 46 2 1
## 19840 40 46 2 1
## 19841 41 46 2 1
## 19842 42 46 2 1
## 19843 43 46 2 1
## 19844 44 46 2 1
## 19845 45 46 2 1
## 19846 46 46 2 1
## 19847 47 46 2 1
## 19848 48 46 2 1
## 19849 49 46 2 1
## 19850 50 46 2 1
## 19851 51 46 2 1
## 19852 52 46 2 1
## 19853 53 46 2 1
## 19854 54 46 2 1
## 19855 55 46 2 1
## 19856 56 46 2 1
## 19857 57 46 2 1
## 19858 58 46 2 1
## 19859 59 46 2 1
## 19860 60 46 2 1
## 19861 61 46 2 1
## 19862 62 46 2 1
## 19863 63 46 2 1
## 19864 64 46 2 1
## 19865 65 46 2 1
## 19866 66 46 2 1
## 19867 67 46 2 1
## 19868 68 46 2 1
## 19869 69 46 2 1
## 19870 70 46 2 1
## 19871 71 46 2 1
## 19872 72 46 2 1
## 19873 73 46 2 1
## 19874 74 46 2 1
## 19875 75 46 2 1
## 19876 76 46 2 1
## 19877 77 46 2 1
## 19878 78 46 2 1
## 19879 79 46 2 1
## 19880 80 46 2 1
## 19881 81 46 2 1
## 19882 82 46 2 1
## 19883 83 46 2 1
## 19884 84 46 2 1
## 19885 85 46 2 1
## 19886 86 46 2 1
## 19887 87 46 2 1
## 19888 88 46 2 1
## 19889 89 46 2 1
## 19890 90 46 2 1
## 19891 91 46 2 1
## 19892 92 46 2 1
## 19893 93 46 2 1
## 19894 94 46 2 1
## 19895 95 46 2 1
## 19896 96 46 2 1
## 19897 97 46 2 1
## 19898 98 46 2 1
## 19899 99 46 2 1
## 19900 100 46 2 1
## 19901 101 46 2 1
## 19902 102 46 2 1
## 19903 103 46 2 1
## 19904 104 46 2 1
## 19905 105 46 2 1
## 19906 106 46 2 1
## 19907 107 46 2 1
## 19908 108 46 2 1
## 19909 109 46 2 1
## 19910 110 46 2 1
## 19911 111 46 2 1
## 19912 112 46 2 1
## 19913 113 46 2 1
## 19914 114 46 2 1
## 19915 115 46 2 1
## 19916 116 46 2 1
## 19917 117 46 2 1
## 19918 118 46 2 1
## 19919 119 46 2 1
## 19920 120 46 2 1
## 19921 1 47 2 1
## 19922 2 47 2 1
## 19923 3 47 2 1
## 19924 4 47 2 1
## 19925 5 47 2 1
## 19926 6 47 2 1
## 19927 7 47 2 1
## 19928 8 47 2 1
## 19929 9 47 2 1
## 19930 10 47 2 1
## 19931 11 47 2 1
## 19932 12 47 2 1
## 19933 13 47 2 1
## 19934 14 47 2 1
## 19935 15 47 2 1
## 19936 16 47 2 1
## 19937 17 47 2 1
## 19938 18 47 2 1
## 19939 19 47 2 1
## 19940 20 47 2 1
## 19941 21 47 2 1
## 19942 22 47 2 1
## 19943 23 47 2 1
## 19944 24 47 2 1
## 19945 25 47 2 1
## 19946 26 47 2 1
## 19947 27 47 2 1
## 19948 28 47 2 1
## 19949 29 47 2 1
## 19950 30 47 2 1
## 19951 31 47 2 1
## 19952 32 47 2 1
## 19953 33 47 2 1
## 19954 34 47 2 1
## 19955 35 47 2 1
## 19956 36 47 2 1
## 19957 37 47 2 1
## 19958 38 47 2 1
## 19959 39 47 2 1
## 19960 40 47 2 1
## 19961 41 47 2 1
## 19962 42 47 2 1
## 19963 43 47 2 1
## 19964 44 47 2 1
## 19965 45 47 2 1
## 19966 46 47 2 1
## 19967 47 47 2 1
## 19968 48 47 2 1
## 19969 49 47 2 1
## 19970 50 47 2 1
## 19971 51 47 2 1
## 19972 52 47 2 1
## 19973 53 47 2 1
## 19974 54 47 2 1
## 19975 55 47 2 1
## 19976 56 47 2 1
## 19977 57 47 2 1
## 19978 58 47 2 1
## 19979 59 47 2 1
## 19980 60 47 2 1
## 19981 61 47 2 1
## 19982 62 47 2 1
## 19983 63 47 2 1
## 19984 64 47 2 1
## 19985 65 47 2 1
## 19986 66 47 2 1
## 19987 67 47 2 1
## 19988 68 47 2 1
## 19989 69 47 2 1
## 19990 70 47 2 1
## 19991 71 47 2 1
## 19992 72 47 2 1
## 19993 73 47 2 1
## 19994 74 47 2 1
## 19995 75 47 2 1
## 19996 76 47 2 1
## 19997 77 47 2 1
## 19998 78 47 2 1
## 19999 79 47 2 1
## 20000 80 47 2 1
## 20001 81 47 2 1
## 20002 82 47 2 1
## 20003 83 47 2 1
## 20004 84 47 2 1
## 20005 85 47 2 1
## 20006 86 47 2 1
## 20007 87 47 2 1
## 20008 88 47 2 1
## 20009 89 47 2 1
## 20010 90 47 2 1
## 20011 91 47 2 1
## 20012 92 47 2 1
## 20013 93 47 2 1
## 20014 94 47 2 1
## 20015 95 47 2 1
## 20016 96 47 2 1
## 20017 97 47 2 1
## 20018 98 47 2 1
## 20019 99 47 2 1
## 20020 100 47 2 1
## 20021 101 47 2 1
## 20022 102 47 2 1
## 20023 103 47 2 1
## 20024 104 47 2 1
## 20025 105 47 2 1
## 20026 106 47 2 1
## 20027 107 47 2 1
## 20028 108 47 2 1
## 20029 109 47 2 1
## 20030 110 47 2 1
## 20031 111 47 2 1
## 20032 112 47 2 1
## 20033 113 47 2 1
## 20034 114 47 2 1
## 20035 115 47 2 1
## 20036 116 47 2 1
## 20037 117 47 2 1
## 20038 118 47 2 1
## 20039 119 47 2 1
## 20040 120 47 2 1
## 20041 1 48 2 1
## 20042 2 48 2 1
## 20043 3 48 2 1
## 20044 4 48 2 1
## 20045 5 48 2 1
## 20046 6 48 2 1
## 20047 7 48 2 1
## 20048 8 48 2 1
## 20049 9 48 2 1
## 20050 10 48 2 1
## 20051 11 48 2 1
## 20052 12 48 2 1
## 20053 13 48 2 1
## 20054 14 48 2 1
## 20055 15 48 2 1
## 20056 16 48 2 1
## 20057 17 48 2 1
## 20058 18 48 2 1
## 20059 19 48 2 1
## 20060 20 48 2 1
## 20061 21 48 2 1
## 20062 22 48 2 1
## 20063 23 48 2 1
## 20064 24 48 2 1
## 20065 25 48 2 1
## 20066 26 48 2 1
## 20067 27 48 2 1
## 20068 28 48 2 1
## 20069 29 48 2 1
## 20070 30 48 2 1
## 20071 31 48 2 1
## 20072 32 48 2 1
## 20073 33 48 2 1
## 20074 34 48 2 1
## 20075 35 48 2 1
## 20076 36 48 2 1
## 20077 37 48 2 1
## 20078 38 48 2 1
## 20079 39 48 2 1
## 20080 40 48 2 1
## 20081 41 48 2 1
## 20082 42 48 2 1
## 20083 43 48 2 1
## 20084 44 48 2 1
## 20085 45 48 2 1
## 20086 46 48 2 1
## 20087 47 48 2 1
## 20088 48 48 2 1
## 20089 49 48 2 1
## 20090 50 48 2 1
## 20091 51 48 2 1
## 20092 52 48 2 1
## 20093 53 48 2 1
## 20094 54 48 2 1
## 20095 55 48 2 1
## 20096 56 48 2 1
## 20097 57 48 2 1
## 20098 58 48 2 1
## 20099 59 48 2 1
## 20100 60 48 2 1
## 20101 61 48 2 1
## 20102 62 48 2 1
## 20103 63 48 2 1
## 20104 64 48 2 1
## 20105 65 48 2 1
## 20106 66 48 2 1
## 20107 67 48 2 1
## 20108 68 48 2 1
## 20109 69 48 2 1
## 20110 70 48 2 1
## 20111 71 48 2 1
## 20112 72 48 2 1
## 20113 73 48 2 1
## 20114 74 48 2 1
## 20115 75 48 2 1
## 20116 76 48 2 1
## 20117 77 48 2 1
## 20118 78 48 2 1
## 20119 79 48 2 1
## 20120 80 48 2 1
## 20121 81 48 2 1
## 20122 82 48 2 1
## 20123 83 48 2 1
## 20124 84 48 2 1
## 20125 85 48 2 1
## 20126 86 48 2 1
## 20127 87 48 2 1
## 20128 88 48 2 1
## 20129 89 48 2 1
## 20130 90 48 2 1
## 20131 91 48 2 1
## 20132 92 48 2 1
## 20133 93 48 2 1
## 20134 94 48 2 1
## 20135 95 48 2 1
## 20136 96 48 2 1
## 20137 97 48 2 1
## 20138 98 48 2 1
## 20139 99 48 2 1
## 20140 100 48 2 1
## 20141 101 48 2 1
## 20142 102 48 2 1
## 20143 103 48 2 1
## 20144 104 48 2 1
## 20145 105 48 2 1
## 20146 106 48 2 1
## 20147 107 48 2 1
## 20148 108 48 2 1
## 20149 109 48 2 1
## 20150 110 48 2 1
## 20151 111 48 2 1
## 20152 112 48 2 1
## 20153 113 48 2 1
## 20154 114 48 2 1
## 20155 115 48 2 1
## 20156 116 48 2 1
## 20157 117 48 2 1
## 20158 118 48 2 1
## 20159 119 48 2 1
## 20160 120 48 2 1
## 20161 1 49 2 1
## 20162 2 49 2 1
## 20163 3 49 2 1
## 20164 4 49 2 1
## 20165 5 49 2 1
## 20166 6 49 2 1
## 20167 7 49 2 1
## 20168 8 49 2 1
## 20169 9 49 2 1
## 20170 10 49 2 1
## 20171 11 49 2 1
## 20172 12 49 2 1
## 20173 13 49 2 1
## 20174 14 49 2 1
## 20175 15 49 2 1
## 20176 16 49 2 1
## 20177 17 49 2 1
## 20178 18 49 2 1
## 20179 19 49 2 1
## 20180 20 49 2 1
## 20181 21 49 2 1
## 20182 22 49 2 1
## 20183 23 49 2 1
## 20184 24 49 2 1
## 20185 25 49 2 1
## 20186 26 49 2 1
## 20187 27 49 2 1
## 20188 28 49 2 1
## 20189 29 49 2 1
## 20190 30 49 2 1
## 20191 31 49 2 1
## 20192 32 49 2 1
## 20193 33 49 2 1
## 20194 34 49 2 1
## 20195 35 49 2 1
## 20196 36 49 2 1
## 20197 37 49 2 1
## 20198 38 49 2 1
## 20199 39 49 2 1
## 20200 40 49 2 1
## 20201 41 49 2 1
## 20202 42 49 2 1
## 20203 43 49 2 1
## 20204 44 49 2 1
## 20205 45 49 2 1
## 20206 46 49 2 1
## 20207 47 49 2 1
## 20208 48 49 2 1
## 20209 49 49 2 1
## 20210 50 49 2 1
## 20211 51 49 2 1
## 20212 52 49 2 1
## 20213 53 49 2 1
## 20214 54 49 2 1
## 20215 55 49 2 1
## 20216 56 49 2 1
## 20217 57 49 2 1
## 20218 58 49 2 1
## 20219 59 49 2 1
## 20220 60 49 2 1
## 20221 61 49 2 1
## 20222 62 49 2 1
## 20223 63 49 2 1
## 20224 64 49 2 1
## 20225 65 49 2 1
## 20226 66 49 2 1
## 20227 67 49 2 1
## 20228 68 49 2 1
## 20229 69 49 2 1
## 20230 70 49 2 1
## 20231 71 49 2 1
## 20232 72 49 2 1
## 20233 73 49 2 1
## 20234 74 49 2 1
## 20235 75 49 2 1
## 20236 76 49 2 1
## 20237 77 49 2 1
## 20238 78 49 2 1
## 20239 79 49 2 1
## 20240 80 49 2 1
## 20241 81 49 2 1
## 20242 82 49 2 1
## 20243 83 49 2 1
## 20244 84 49 2 1
## 20245 85 49 2 1
## 20246 86 49 2 1
## 20247 87 49 2 1
## 20248 88 49 2 1
## 20249 89 49 2 1
## 20250 90 49 2 1
## 20251 91 49 2 1
## 20252 92 49 2 1
## 20253 93 49 2 1
## 20254 94 49 2 1
## 20255 95 49 2 1
## 20256 96 49 2 1
## 20257 97 49 2 1
## 20258 98 49 2 1
## 20259 99 49 2 1
## 20260 100 49 2 1
## 20261 101 49 2 1
## 20262 102 49 2 1
## 20263 103 49 2 1
## 20264 104 49 2 1
## 20265 105 49 2 1
## 20266 106 49 2 1
## 20267 107 49 2 1
## 20268 108 49 2 1
## 20269 109 49 2 1
## 20270 110 49 2 1
## 20271 111 49 2 1
## 20272 112 49 2 1
## 20273 113 49 2 1
## 20274 114 49 2 1
## 20275 115 49 2 1
## 20276 116 49 2 1
## 20277 117 49 2 1
## 20278 118 49 2 1
## 20279 119 49 2 1
## 20280 120 49 2 1
## 20281 1 50 2 1
## 20282 2 50 2 1
## 20283 3 50 2 1
## 20284 4 50 2 1
## 20285 5 50 2 1
## 20286 6 50 2 1
## 20287 7 50 2 1
## 20288 8 50 2 1
## 20289 9 50 2 1
## 20290 10 50 2 1
## 20291 11 50 2 1
## 20292 12 50 2 1
## 20293 13 50 2 1
## 20294 14 50 2 1
## 20295 15 50 2 1
## 20296 16 50 2 1
## 20297 17 50 2 1
## 20298 18 50 2 1
## 20299 19 50 2 1
## 20300 20 50 2 1
## 20301 21 50 2 1
## 20302 22 50 2 1
## 20303 23 50 2 1
## 20304 24 50 2 1
## 20305 25 50 2 1
## 20306 26 50 2 1
## 20307 27 50 2 1
## 20308 28 50 2 1
## 20309 29 50 2 1
## 20310 30 50 2 1
## 20311 31 50 2 1
## 20312 32 50 2 1
## 20313 33 50 2 1
## 20314 34 50 2 1
## 20315 35 50 2 1
## 20316 36 50 2 1
## 20317 37 50 2 1
## 20318 38 50 2 1
## 20319 39 50 2 1
## 20320 40 50 2 1
## 20321 41 50 2 1
## 20322 42 50 2 1
## 20323 43 50 2 1
## 20324 44 50 2 1
## 20325 45 50 2 1
## 20326 46 50 2 1
## 20327 47 50 2 1
## 20328 48 50 2 1
## 20329 49 50 2 1
## 20330 50 50 2 1
## 20331 51 50 2 1
## 20332 52 50 2 1
## 20333 53 50 2 1
## 20334 54 50 2 1
## 20335 55 50 2 1
## 20336 56 50 2 1
## 20337 57 50 2 1
## 20338 58 50 2 1
## 20339 59 50 2 1
## 20340 60 50 2 1
## 20341 61 50 2 1
## 20342 62 50 2 1
## 20343 63 50 2 1
## 20344 64 50 2 1
## 20345 65 50 2 1
## 20346 66 50 2 1
## 20347 67 50 2 1
## 20348 68 50 2 1
## 20349 69 50 2 1
## 20350 70 50 2 1
## 20351 71 50 2 1
## 20352 72 50 2 1
## 20353 73 50 2 1
## 20354 74 50 2 1
## 20355 75 50 2 1
## 20356 76 50 2 1
## 20357 77 50 2 1
## 20358 78 50 2 1
## 20359 79 50 2 1
## 20360 80 50 2 1
## 20361 81 50 2 1
## 20362 82 50 2 1
## 20363 83 50 2 1
## 20364 84 50 2 1
## 20365 85 50 2 1
## 20366 86 50 2 1
## 20367 87 50 2 1
## 20368 88 50 2 1
## 20369 89 50 2 1
## 20370 90 50 2 1
## 20371 91 50 2 1
## 20372 92 50 2 1
## 20373 93 50 2 1
## 20374 94 50 2 1
## 20375 95 50 2 1
## 20376 96 50 2 1
## 20377 97 50 2 1
## 20378 98 50 2 1
## 20379 99 50 2 1
## 20380 100 50 2 1
## 20381 101 50 2 1
## 20382 102 50 2 1
## 20383 103 50 2 1
## 20384 104 50 2 1
## 20385 105 50 2 1
## 20386 106 50 2 1
## 20387 107 50 2 1
## 20388 108 50 2 1
## 20389 109 50 2 1
## 20390 110 50 2 1
## 20391 111 50 2 1
## 20392 112 50 2 1
## 20393 113 50 2 1
## 20394 114 50 2 1
## 20395 115 50 2 1
## 20396 116 50 2 1
## 20397 117 50 2 1
## 20398 118 50 2 1
## 20399 119 50 2 1
## 20400 120 50 2 1
## 20401 1 51 2 1
## 20402 2 51 2 1
## 20403 3 51 2 1
## 20404 4 51 2 1
## 20405 5 51 2 1
## 20406 6 51 2 1
## 20407 7 51 2 1
## 20408 8 51 2 1
## 20409 9 51 2 1
## 20410 10 51 2 1
## 20411 11 51 2 1
## 20412 12 51 2 1
## 20413 13 51 2 1
## 20414 14 51 2 1
## 20415 15 51 2 1
## 20416 16 51 2 1
## 20417 17 51 2 1
## 20418 18 51 2 1
## 20419 19 51 2 1
## 20420 20 51 2 1
## 20421 21 51 2 1
## 20422 22 51 2 1
## 20423 23 51 2 1
## 20424 24 51 2 1
## 20425 25 51 2 1
## 20426 26 51 2 1
## 20427 27 51 2 1
## 20428 28 51 2 1
## 20429 29 51 2 1
## 20430 30 51 2 1
## 20431 31 51 2 1
## 20432 32 51 2 1
## 20433 33 51 2 1
## 20434 34 51 2 1
## 20435 35 51 2 1
## 20436 36 51 2 1
## 20437 37 51 2 1
## 20438 38 51 2 1
## 20439 39 51 2 1
## 20440 40 51 2 1
## 20441 41 51 2 1
## 20442 42 51 2 1
## 20443 43 51 2 1
## 20444 44 51 2 1
## 20445 45 51 2 1
## 20446 46 51 2 1
## 20447 47 51 2 1
## 20448 48 51 2 1
## 20449 49 51 2 1
## 20450 50 51 2 1
## 20451 51 51 2 1
## 20452 52 51 2 1
## 20453 53 51 2 1
## 20454 54 51 2 1
## 20455 55 51 2 1
## 20456 56 51 2 1
## 20457 57 51 2 1
## 20458 58 51 2 1
## 20459 59 51 2 1
## 20460 60 51 2 1
## 20461 61 51 2 1
## 20462 62 51 2 1
## 20463 63 51 2 1
## 20464 64 51 2 1
## 20465 65 51 2 1
## 20466 66 51 2 1
## 20467 67 51 2 1
## 20468 68 51 2 1
## 20469 69 51 2 1
## 20470 70 51 2 1
## 20471 71 51 2 1
## 20472 72 51 2 1
## 20473 73 51 2 1
## 20474 74 51 2 1
## 20475 75 51 2 1
## 20476 76 51 2 1
## 20477 77 51 2 1
## 20478 78 51 2 1
## 20479 79 51 2 1
## 20480 80 51 2 1
## 20481 81 51 2 1
## 20482 82 51 2 1
## 20483 83 51 2 1
## 20484 84 51 2 1
## 20485 85 51 2 1
## 20486 86 51 2 1
## 20487 87 51 2 1
## 20488 88 51 2 1
## 20489 89 51 2 1
## 20490 90 51 2 1
## 20491 91 51 2 1
## 20492 92 51 2 1
## 20493 93 51 2 1
## 20494 94 51 2 1
## 20495 95 51 2 1
## 20496 96 51 2 1
## 20497 97 51 2 1
## 20498 98 51 2 1
## 20499 99 51 2 1
## 20500 100 51 2 1
## 20501 101 51 2 1
## 20502 102 51 2 1
## 20503 103 51 2 1
## 20504 104 51 2 1
## 20505 105 51 2 1
## 20506 106 51 2 1
## 20507 107 51 2 1
## 20508 108 51 2 1
## 20509 109 51 2 1
## 20510 110 51 2 1
## 20511 111 51 2 1
## 20512 112 51 2 1
## 20513 113 51 2 1
## 20514 114 51 2 1
## 20515 115 51 2 1
## 20516 116 51 2 1
## 20517 117 51 2 1
## 20518 118 51 2 1
## 20519 119 51 2 1
## 20520 120 51 2 1
## 20521 1 52 2 1
## 20522 2 52 2 1
## 20523 3 52 2 1
## 20524 4 52 2 1
## 20525 5 52 2 1
## 20526 6 52 2 1
## 20527 7 52 2 1
## 20528 8 52 2 1
## 20529 9 52 2 1
## 20530 10 52 2 1
## 20531 11 52 2 1
## 20532 12 52 2 1
## 20533 13 52 2 1
## 20534 14 52 2 1
## 20535 15 52 2 1
## 20536 16 52 2 1
## 20537 17 52 2 1
## 20538 18 52 2 1
## 20539 19 52 2 1
## 20540 20 52 2 1
## 20541 21 52 2 1
## 20542 22 52 2 1
## 20543 23 52 2 1
## 20544 24 52 2 1
## 20545 25 52 2 1
## 20546 26 52 2 1
## 20547 27 52 2 1
## 20548 28 52 2 1
## 20549 29 52 2 1
## 20550 30 52 2 1
## 20551 31 52 2 1
## 20552 32 52 2 1
## 20553 33 52 2 1
## 20554 34 52 2 1
## 20555 35 52 2 1
## 20556 36 52 2 1
## 20557 37 52 2 1
## 20558 38 52 2 1
## 20559 39 52 2 1
## 20560 40 52 2 1
## 20561 41 52 2 1
## 20562 42 52 2 1
## 20563 43 52 2 1
## 20564 44 52 2 1
## 20565 45 52 2 1
## 20566 46 52 2 1
## 20567 47 52 2 1
## 20568 48 52 2 1
## 20569 49 52 2 1
## 20570 50 52 2 1
## 20571 51 52 2 1
## 20572 52 52 2 1
## 20573 53 52 2 1
## 20574 54 52 2 1
## 20575 55 52 2 1
## 20576 56 52 2 1
## 20577 57 52 2 1
## 20578 58 52 2 1
## 20579 59 52 2 1
## 20580 60 52 2 1
## 20581 61 52 2 1
## 20582 62 52 2 1
## 20583 63 52 2 1
## 20584 64 52 2 1
## 20585 65 52 2 1
## 20586 66 52 2 1
## 20587 67 52 2 1
## 20588 68 52 2 1
## 20589 69 52 2 1
## 20590 70 52 2 1
## 20591 71 52 2 1
## 20592 72 52 2 1
## 20593 73 52 2 1
## 20594 74 52 2 1
## 20595 75 52 2 1
## 20596 76 52 2 1
## 20597 77 52 2 1
## 20598 78 52 2 1
## 20599 79 52 2 1
## 20600 80 52 2 1
## 20601 81 52 2 1
## 20602 82 52 2 1
## 20603 83 52 2 1
## 20604 84 52 2 1
## 20605 85 52 2 1
## 20606 86 52 2 1
## 20607 87 52 2 1
## 20608 88 52 2 1
## 20609 89 52 2 1
## 20610 90 52 2 1
## 20611 91 52 2 1
## 20612 92 52 2 1
## 20613 93 52 2 1
## 20614 94 52 2 1
## 20615 95 52 2 1
## 20616 96 52 2 1
## 20617 97 52 2 1
## 20618 98 52 2 1
## 20619 99 52 2 1
## 20620 100 52 2 1
## 20621 101 52 2 1
## 20622 102 52 2 1
## 20623 103 52 2 1
## 20624 104 52 2 1
## 20625 105 52 2 1
## 20626 106 52 2 1
## 20627 107 52 2 1
## 20628 108 52 2 1
## 20629 109 52 2 1
## 20630 110 52 2 1
## 20631 111 52 2 1
## 20632 112 52 2 1
## 20633 113 52 2 1
## 20634 114 52 2 1
## 20635 115 52 2 1
## 20636 116 52 2 1
## 20637 117 52 2 1
## 20638 118 52 2 1
## 20639 119 52 2 1
## 20640 120 52 2 1
## 20641 1 53 2 1
## 20642 2 53 2 1
## 20643 3 53 2 1
## 20644 4 53 2 1
## 20645 5 53 2 1
## 20646 6 53 2 1
## 20647 7 53 2 1
## 20648 8 53 2 1
## 20649 9 53 2 1
## 20650 10 53 2 1
## 20651 11 53 2 1
## 20652 12 53 2 1
## 20653 13 53 2 1
## 20654 14 53 2 1
## 20655 15 53 2 1
## 20656 16 53 2 1
## 20657 17 53 2 1
## 20658 18 53 2 1
## 20659 19 53 2 1
## 20660 20 53 2 1
## 20661 21 53 2 1
## 20662 22 53 2 1
## 20663 23 53 2 1
## 20664 24 53 2 1
## 20665 25 53 2 1
## 20666 26 53 2 1
## 20667 27 53 2 1
## 20668 28 53 2 1
## 20669 29 53 2 1
## 20670 30 53 2 1
## 20671 31 53 2 1
## 20672 32 53 2 1
## 20673 33 53 2 1
## 20674 34 53 2 1
## 20675 35 53 2 1
## 20676 36 53 2 1
## 20677 37 53 2 1
## 20678 38 53 2 1
## 20679 39 53 2 1
## 20680 40 53 2 1
## 20681 41 53 2 1
## 20682 42 53 2 1
## 20683 43 53 2 1
## 20684 44 53 2 1
## 20685 45 53 2 1
## 20686 46 53 2 1
## 20687 47 53 2 1
## 20688 48 53 2 1
## 20689 49 53 2 1
## 20690 50 53 2 1
## 20691 51 53 2 1
## 20692 52 53 2 1
## 20693 53 53 2 1
## 20694 54 53 2 1
## 20695 55 53 2 1
## 20696 56 53 2 1
## 20697 57 53 2 1
## 20698 58 53 2 1
## 20699 59 53 2 1
## 20700 60 53 2 1
## 20701 61 53 2 1
## 20702 62 53 2 1
## 20703 63 53 2 1
## 20704 64 53 2 1
## 20705 65 53 2 1
## 20706 66 53 2 1
## 20707 67 53 2 1
## 20708 68 53 2 1
## 20709 69 53 2 1
## 20710 70 53 2 1
## 20711 71 53 2 1
## 20712 72 53 2 1
## 20713 73 53 2 1
## 20714 74 53 2 1
## 20715 75 53 2 1
## 20716 76 53 2 1
## 20717 77 53 2 1
## 20718 78 53 2 1
## 20719 79 53 2 1
## 20720 80 53 2 1
## 20721 81 53 2 1
## 20722 82 53 2 1
## 20723 83 53 2 1
## 20724 84 53 2 1
## 20725 85 53 2 1
## 20726 86 53 2 1
## 20727 87 53 2 1
## 20728 88 53 2 1
## 20729 89 53 2 1
## 20730 90 53 2 1
## 20731 91 53 2 1
## 20732 92 53 2 1
## 20733 93 53 2 1
## 20734 94 53 2 1
## 20735 95 53 2 1
## 20736 96 53 2 1
## 20737 97 53 2 1
## 20738 98 53 2 1
## 20739 99 53 2 1
## 20740 100 53 2 1
## 20741 101 53 2 1
## 20742 102 53 2 1
## 20743 103 53 2 1
## 20744 104 53 2 1
## 20745 105 53 2 1
## 20746 106 53 2 1
## 20747 107 53 2 1
## 20748 108 53 2 1
## 20749 109 53 2 1
## 20750 110 53 2 1
## 20751 111 53 2 1
## 20752 112 53 2 1
## 20753 113 53 2 1
## 20754 114 53 2 1
## 20755 115 53 2 1
## 20756 116 53 2 1
## 20757 117 53 2 1
## 20758 118 53 2 1
## 20759 119 53 2 1
## 20760 120 53 2 1
## 20761 1 54 2 1
## 20762 2 54 2 1
## 20763 3 54 2 1
## 20764 4 54 2 1
## 20765 5 54 2 1
## 20766 6 54 2 1
## 20767 7 54 2 1
## 20768 8 54 2 1
## 20769 9 54 2 1
## 20770 10 54 2 1
## 20771 11 54 2 1
## 20772 12 54 2 1
## 20773 13 54 2 1
## 20774 14 54 2 1
## 20775 15 54 2 1
## 20776 16 54 2 1
## 20777 17 54 2 1
## 20778 18 54 2 1
## 20779 19 54 2 1
## 20780 20 54 2 1
## 20781 21 54 2 1
## 20782 22 54 2 1
## 20783 23 54 2 1
## 20784 24 54 2 1
## 20785 25 54 2 1
## 20786 26 54 2 1
## 20787 27 54 2 1
## 20788 28 54 2 1
## 20789 29 54 2 1
## 20790 30 54 2 1
## 20791 31 54 2 1
## 20792 32 54 2 1
## 20793 33 54 2 1
## 20794 34 54 2 1
## 20795 35 54 2 1
## 20796 36 54 2 1
## 20797 37 54 2 1
## 20798 38 54 2 1
## 20799 39 54 2 1
## 20800 40 54 2 1
## 20801 41 54 2 1
## 20802 42 54 2 1
## 20803 43 54 2 1
## 20804 44 54 2 1
## 20805 45 54 2 1
## 20806 46 54 2 1
## 20807 47 54 2 1
## 20808 48 54 2 1
## 20809 49 54 2 1
## 20810 50 54 2 1
## 20811 51 54 2 1
## 20812 52 54 2 1
## 20813 53 54 2 1
## 20814 54 54 2 1
## 20815 55 54 2 1
## 20816 56 54 2 1
## 20817 57 54 2 1
## 20818 58 54 2 1
## 20819 59 54 2 1
## 20820 60 54 2 1
## 20821 61 54 2 1
## 20822 62 54 2 1
## 20823 63 54 2 1
## 20824 64 54 2 1
## 20825 65 54 2 1
## 20826 66 54 2 1
## 20827 67 54 2 1
## 20828 68 54 2 1
## 20829 69 54 2 1
## 20830 70 54 2 1
## 20831 71 54 2 1
## 20832 72 54 2 1
## 20833 73 54 2 1
## 20834 74 54 2 1
## 20835 75 54 2 1
## 20836 76 54 2 1
## 20837 77 54 2 1
## 20838 78 54 2 1
## 20839 79 54 2 1
## 20840 80 54 2 1
## 20841 81 54 2 1
## 20842 82 54 2 1
## 20843 83 54 2 1
## 20844 84 54 2 1
## 20845 85 54 2 1
## 20846 86 54 2 1
## 20847 87 54 2 1
## 20848 88 54 2 1
## 20849 89 54 2 1
## 20850 90 54 2 1
## 20851 91 54 2 1
## 20852 92 54 2 1
## 20853 93 54 2 1
## 20854 94 54 2 1
## 20855 95 54 2 1
## 20856 96 54 2 1
## 20857 97 54 2 1
## 20858 98 54 2 1
## 20859 99 54 2 1
## 20860 100 54 2 1
## 20861 101 54 2 1
## 20862 102 54 2 1
## 20863 103 54 2 1
## 20864 104 54 2 1
## 20865 105 54 2 1
## 20866 106 54 2 1
## 20867 107 54 2 1
## 20868 108 54 2 1
## 20869 109 54 2 1
## 20870 110 54 2 1
## 20871 111 54 2 1
## 20872 112 54 2 1
## 20873 113 54 2 1
## 20874 114 54 2 1
## 20875 115 54 2 1
## 20876 116 54 2 1
## 20877 117 54 2 1
## 20878 118 54 2 1
## 20879 119 54 2 1
## 20880 120 54 2 1
## 20881 1 55 2 1
## 20882 2 55 2 1
## 20883 3 55 2 1
## 20884 4 55 2 1
## 20885 5 55 2 1
## 20886 6 55 2 1
## 20887 7 55 2 1
## 20888 8 55 2 1
## 20889 9 55 2 1
## 20890 10 55 2 1
## 20891 11 55 2 1
## 20892 12 55 2 1
## 20893 13 55 2 1
## 20894 14 55 2 1
## 20895 15 55 2 1
## 20896 16 55 2 1
## 20897 17 55 2 1
## 20898 18 55 2 1
## 20899 19 55 2 1
## 20900 20 55 2 1
## 20901 21 55 2 1
## 20902 22 55 2 1
## 20903 23 55 2 1
## 20904 24 55 2 1
## 20905 25 55 2 1
## 20906 26 55 2 1
## 20907 27 55 2 1
## 20908 28 55 2 1
## 20909 29 55 2 1
## 20910 30 55 2 1
## 20911 31 55 2 1
## 20912 32 55 2 1
## 20913 33 55 2 1
## 20914 34 55 2 1
## 20915 35 55 2 1
## 20916 36 55 2 1
## 20917 37 55 2 1
## 20918 38 55 2 1
## 20919 39 55 2 1
## 20920 40 55 2 1
## 20921 41 55 2 1
## 20922 42 55 2 1
## 20923 43 55 2 1
## 20924 44 55 2 1
## 20925 45 55 2 1
## 20926 46 55 2 1
## 20927 47 55 2 1
## 20928 48 55 2 1
## 20929 49 55 2 1
## 20930 50 55 2 1
## 20931 51 55 2 1
## 20932 52 55 2 1
## 20933 53 55 2 1
## 20934 54 55 2 1
## 20935 55 55 2 1
## 20936 56 55 2 1
## 20937 57 55 2 1
## 20938 58 55 2 1
## 20939 59 55 2 1
## 20940 60 55 2 1
## 20941 61 55 2 1
## 20942 62 55 2 1
## 20943 63 55 2 1
## 20944 64 55 2 1
## 20945 65 55 2 1
## 20946 66 55 2 1
## 20947 67 55 2 1
## 20948 68 55 2 1
## 20949 69 55 2 1
## 20950 70 55 2 1
## 20951 71 55 2 1
## 20952 72 55 2 1
## 20953 73 55 2 1
## 20954 74 55 2 1
## 20955 75 55 2 1
## 20956 76 55 2 1
## 20957 77 55 2 1
## 20958 78 55 2 1
## 20959 79 55 2 1
## 20960 80 55 2 1
## 20961 81 55 2 1
## 20962 82 55 2 1
## 20963 83 55 2 1
## 20964 84 55 2 1
## 20965 85 55 2 1
## 20966 86 55 2 1
## 20967 87 55 2 1
## 20968 88 55 2 1
## 20969 89 55 2 1
## 20970 90 55 2 1
## 20971 91 55 2 1
## 20972 92 55 2 1
## 20973 93 55 2 1
## 20974 94 55 2 1
## 20975 95 55 2 1
## 20976 96 55 2 1
## 20977 97 55 2 1
## 20978 98 55 2 1
## 20979 99 55 2 1
## 20980 100 55 2 1
## 20981 101 55 2 1
## 20982 102 55 2 1
## 20983 103 55 2 1
## 20984 104 55 2 1
## 20985 105 55 2 1
## 20986 106 55 2 1
## 20987 107 55 2 1
## 20988 108 55 2 1
## 20989 109 55 2 1
## 20990 110 55 2 1
## 20991 111 55 2 1
## 20992 112 55 2 1
## 20993 113 55 2 1
## 20994 114 55 2 1
## 20995 115 55 2 1
## 20996 116 55 2 1
## 20997 117 55 2 1
## 20998 118 55 2 1
## 20999 119 55 2 1
## 21000 120 55 2 1
## 21001 1 56 2 1
## 21002 2 56 2 1
## 21003 3 56 2 1
## 21004 4 56 2 1
## 21005 5 56 2 1
## 21006 6 56 2 1
## 21007 7 56 2 1
## 21008 8 56 2 1
## 21009 9 56 2 1
## 21010 10 56 2 1
## 21011 11 56 2 1
## 21012 12 56 2 1
## 21013 13 56 2 1
## 21014 14 56 2 1
## 21015 15 56 2 1
## 21016 16 56 2 1
## 21017 17 56 2 1
## 21018 18 56 2 1
## 21019 19 56 2 1
## 21020 20 56 2 1
## 21021 21 56 2 1
## 21022 22 56 2 1
## 21023 23 56 2 1
## 21024 24 56 2 1
## 21025 25 56 2 1
## 21026 26 56 2 1
## 21027 27 56 2 1
## 21028 28 56 2 1
## 21029 29 56 2 1
## 21030 30 56 2 1
## 21031 31 56 2 1
## 21032 32 56 2 1
## 21033 33 56 2 1
## 21034 34 56 2 1
## 21035 35 56 2 1
## 21036 36 56 2 1
## 21037 37 56 2 1
## 21038 38 56 2 1
## 21039 39 56 2 1
## 21040 40 56 2 1
## 21041 41 56 2 1
## 21042 42 56 2 1
## 21043 43 56 2 1
## 21044 44 56 2 1
## 21045 45 56 2 1
## 21046 46 56 2 1
## 21047 47 56 2 1
## 21048 48 56 2 1
## 21049 49 56 2 1
## 21050 50 56 2 1
## 21051 51 56 2 1
## 21052 52 56 2 1
## 21053 53 56 2 1
## 21054 54 56 2 1
## 21055 55 56 2 1
## 21056 56 56 2 1
## 21057 57 56 2 1
## 21058 58 56 2 1
## 21059 59 56 2 1
## 21060 60 56 2 1
## 21061 61 56 2 1
## 21062 62 56 2 1
## 21063 63 56 2 1
## 21064 64 56 2 1
## 21065 65 56 2 1
## 21066 66 56 2 1
## 21067 67 56 2 1
## 21068 68 56 2 1
## 21069 69 56 2 1
## 21070 70 56 2 1
## 21071 71 56 2 1
## 21072 72 56 2 1
## 21073 73 56 2 1
## 21074 74 56 2 1
## 21075 75 56 2 1
## 21076 76 56 2 1
## 21077 77 56 2 1
## 21078 78 56 2 1
## 21079 79 56 2 1
## 21080 80 56 2 1
## 21081 81 56 2 1
## 21082 82 56 2 1
## 21083 83 56 2 1
## 21084 84 56 2 1
## 21085 85 56 2 1
## 21086 86 56 2 1
## 21087 87 56 2 1
## 21088 88 56 2 1
## 21089 89 56 2 1
## 21090 90 56 2 1
## 21091 91 56 2 1
## 21092 92 56 2 1
## 21093 93 56 2 1
## 21094 94 56 2 1
## 21095 95 56 2 1
## 21096 96 56 2 1
## 21097 97 56 2 1
## 21098 98 56 2 1
## 21099 99 56 2 1
## 21100 100 56 2 1
## 21101 101 56 2 1
## 21102 102 56 2 1
## 21103 103 56 2 1
## 21104 104 56 2 1
## 21105 105 56 2 1
## 21106 106 56 2 1
## 21107 107 56 2 1
## 21108 108 56 2 1
## 21109 109 56 2 1
## 21110 110 56 2 1
## 21111 111 56 2 1
## 21112 112 56 2 1
## 21113 113 56 2 1
## 21114 114 56 2 1
## 21115 115 56 2 1
## 21116 116 56 2 1
## 21117 117 56 2 1
## 21118 118 56 2 1
## 21119 119 56 2 1
## 21120 120 56 2 1
## 21121 1 57 2 1
## 21122 2 57 2 1
## 21123 3 57 2 1
## 21124 4 57 2 1
## 21125 5 57 2 1
## 21126 6 57 2 1
## 21127 7 57 2 1
## 21128 8 57 2 1
## 21129 9 57 2 1
## 21130 10 57 2 1
## 21131 11 57 2 1
## 21132 12 57 2 1
## 21133 13 57 2 1
## 21134 14 57 2 1
## 21135 15 57 2 1
## 21136 16 57 2 1
## 21137 17 57 2 1
## 21138 18 57 2 1
## 21139 19 57 2 1
## 21140 20 57 2 1
## 21141 21 57 2 1
## 21142 22 57 2 1
## 21143 23 57 2 1
## 21144 24 57 2 1
## 21145 25 57 2 1
## 21146 26 57 2 1
## 21147 27 57 2 1
## 21148 28 57 2 1
## 21149 29 57 2 1
## 21150 30 57 2 1
## 21151 31 57 2 1
## 21152 32 57 2 1
## 21153 33 57 2 1
## 21154 34 57 2 1
## 21155 35 57 2 1
## 21156 36 57 2 1
## 21157 37 57 2 1
## 21158 38 57 2 1
## 21159 39 57 2 1
## 21160 40 57 2 1
## 21161 41 57 2 1
## 21162 42 57 2 1
## 21163 43 57 2 1
## 21164 44 57 2 1
## 21165 45 57 2 1
## 21166 46 57 2 1
## 21167 47 57 2 1
## 21168 48 57 2 1
## 21169 49 57 2 1
## 21170 50 57 2 1
## 21171 51 57 2 1
## 21172 52 57 2 1
## 21173 53 57 2 1
## 21174 54 57 2 1
## 21175 55 57 2 1
## 21176 56 57 2 1
## 21177 57 57 2 1
## 21178 58 57 2 1
## 21179 59 57 2 1
## 21180 60 57 2 1
## 21181 61 57 2 1
## 21182 62 57 2 1
## 21183 63 57 2 1
## 21184 64 57 2 1
## 21185 65 57 2 1
## 21186 66 57 2 1
## 21187 67 57 2 1
## 21188 68 57 2 1
## 21189 69 57 2 1
## 21190 70 57 2 1
## 21191 71 57 2 1
## 21192 72 57 2 1
## 21193 73 57 2 1
## 21194 74 57 2 1
## 21195 75 57 2 1
## 21196 76 57 2 1
## 21197 77 57 2 1
## 21198 78 57 2 1
## 21199 79 57 2 1
## 21200 80 57 2 1
## 21201 81 57 2 1
## 21202 82 57 2 1
## 21203 83 57 2 1
## 21204 84 57 2 1
## 21205 85 57 2 1
## 21206 86 57 2 1
## 21207 87 57 2 1
## 21208 88 57 2 1
## 21209 89 57 2 1
## 21210 90 57 2 1
## 21211 91 57 2 1
## 21212 92 57 2 1
## 21213 93 57 2 1
## 21214 94 57 2 1
## 21215 95 57 2 1
## 21216 96 57 2 1
## 21217 97 57 2 1
## 21218 98 57 2 1
## 21219 99 57 2 1
## 21220 100 57 2 1
## 21221 101 57 2 1
## 21222 102 57 2 1
## 21223 103 57 2 1
## 21224 104 57 2 1
## 21225 105 57 2 1
## 21226 106 57 2 1
## 21227 107 57 2 1
## 21228 108 57 2 1
## 21229 109 57 2 1
## 21230 110 57 2 1
## 21231 111 57 2 1
## 21232 112 57 2 1
## 21233 113 57 2 1
## 21234 114 57 2 1
## 21235 115 57 2 1
## 21236 116 57 2 1
## 21237 117 57 2 1
## 21238 118 57 2 1
## 21239 119 57 2 1
## 21240 120 57 2 1
## 21241 1 58 2 1
## 21242 2 58 2 1
## 21243 3 58 2 1
## 21244 4 58 2 1
## 21245 5 58 2 1
## 21246 6 58 2 1
## 21247 7 58 2 1
## 21248 8 58 2 1
## 21249 9 58 2 1
## 21250 10 58 2 1
## 21251 11 58 2 1
## 21252 12 58 2 1
## 21253 13 58 2 1
## 21254 14 58 2 1
## 21255 15 58 2 1
## 21256 16 58 2 1
## 21257 17 58 2 1
## 21258 18 58 2 1
## 21259 19 58 2 1
## 21260 20 58 2 1
## 21261 21 58 2 1
## 21262 22 58 2 1
## 21263 23 58 2 1
## 21264 24 58 2 1
## 21265 25 58 2 1
## 21266 26 58 2 1
## 21267 27 58 2 1
## 21268 28 58 2 1
## 21269 29 58 2 1
## 21270 30 58 2 1
## 21271 31 58 2 1
## 21272 32 58 2 1
## 21273 33 58 2 1
## 21274 34 58 2 1
## 21275 35 58 2 1
## 21276 36 58 2 1
## 21277 37 58 2 1
## 21278 38 58 2 1
## 21279 39 58 2 1
## 21280 40 58 2 1
## 21281 41 58 2 1
## 21282 42 58 2 1
## 21283 43 58 2 1
## 21284 44 58 2 1
## 21285 45 58 2 1
## 21286 46 58 2 1
## 21287 47 58 2 1
## 21288 48 58 2 1
## 21289 49 58 2 1
## 21290 50 58 2 1
## 21291 51 58 2 1
## 21292 52 58 2 1
## 21293 53 58 2 1
## 21294 54 58 2 1
## 21295 55 58 2 1
## 21296 56 58 2 1
## 21297 57 58 2 1
## 21298 58 58 2 1
## 21299 59 58 2 1
## 21300 60 58 2 1
## 21301 61 58 2 1
## 21302 62 58 2 1
## 21303 63 58 2 1
## 21304 64 58 2 1
## 21305 65 58 2 1
## 21306 66 58 2 1
## 21307 67 58 2 1
## 21308 68 58 2 1
## 21309 69 58 2 1
## 21310 70 58 2 1
## 21311 71 58 2 1
## 21312 72 58 2 1
## 21313 73 58 2 1
## 21314 74 58 2 1
## 21315 75 58 2 1
## 21316 76 58 2 1
## 21317 77 58 2 1
## 21318 78 58 2 1
## 21319 79 58 2 1
## 21320 80 58 2 1
## 21321 81 58 2 1
## 21322 82 58 2 1
## 21323 83 58 2 1
## 21324 84 58 2 1
## 21325 85 58 2 1
## 21326 86 58 2 1
## 21327 87 58 2 1
## 21328 88 58 2 1
## 21329 89 58 2 1
## 21330 90 58 2 1
## 21331 91 58 2 1
## 21332 92 58 2 1
## 21333 93 58 2 1
## 21334 94 58 2 1
## 21335 95 58 2 1
## 21336 96 58 2 1
## 21337 97 58 2 1
## 21338 98 58 2 1
## 21339 99 58 2 1
## 21340 100 58 2 1
## 21341 101 58 2 1
## 21342 102 58 2 1
## 21343 103 58 2 1
## 21344 104 58 2 1
## 21345 105 58 2 1
## 21346 106 58 2 1
## 21347 107 58 2 1
## 21348 108 58 2 1
## 21349 109 58 2 1
## 21350 110 58 2 1
## 21351 111 58 2 1
## 21352 112 58 2 1
## 21353 113 58 2 1
## 21354 114 58 2 1
## 21355 115 58 2 1
## 21356 116 58 2 1
## 21357 117 58 2 1
## 21358 118 58 2 1
## 21359 119 58 2 1
## 21360 120 58 2 1
## 21361 1 59 2 1
## 21362 2 59 2 1
## 21363 3 59 2 1
## 21364 4 59 2 1
## 21365 5 59 2 1
## 21366 6 59 2 1
## 21367 7 59 2 1
## 21368 8 59 2 1
## 21369 9 59 2 1
## 21370 10 59 2 1
## 21371 11 59 2 1
## 21372 12 59 2 1
## 21373 13 59 2 1
## 21374 14 59 2 1
## 21375 15 59 2 1
## 21376 16 59 2 1
## 21377 17 59 2 1
## 21378 18 59 2 1
## 21379 19 59 2 1
## 21380 20 59 2 1
## 21381 21 59 2 1
## 21382 22 59 2 1
## 21383 23 59 2 1
## 21384 24 59 2 1
## 21385 25 59 2 1
## 21386 26 59 2 1
## 21387 27 59 2 1
## 21388 28 59 2 1
## 21389 29 59 2 1
## 21390 30 59 2 1
## 21391 31 59 2 1
## 21392 32 59 2 1
## 21393 33 59 2 1
## 21394 34 59 2 1
## 21395 35 59 2 1
## 21396 36 59 2 1
## 21397 37 59 2 1
## 21398 38 59 2 1
## 21399 39 59 2 1
## 21400 40 59 2 1
## 21401 41 59 2 1
## 21402 42 59 2 1
## 21403 43 59 2 1
## 21404 44 59 2 1
## 21405 45 59 2 1
## 21406 46 59 2 1
## 21407 47 59 2 1
## 21408 48 59 2 1
## 21409 49 59 2 1
## 21410 50 59 2 1
## 21411 51 59 2 1
## 21412 52 59 2 1
## 21413 53 59 2 1
## 21414 54 59 2 1
## 21415 55 59 2 1
## 21416 56 59 2 1
## 21417 57 59 2 1
## 21418 58 59 2 1
## 21419 59 59 2 1
## 21420 60 59 2 1
## 21421 61 59 2 1
## 21422 62 59 2 1
## 21423 63 59 2 1
## 21424 64 59 2 1
## 21425 65 59 2 1
## 21426 66 59 2 1
## 21427 67 59 2 1
## 21428 68 59 2 1
## 21429 69 59 2 1
## 21430 70 59 2 1
## 21431 71 59 2 1
## 21432 72 59 2 1
## 21433 73 59 2 1
## 21434 74 59 2 1
## 21435 75 59 2 1
## 21436 76 59 2 1
## 21437 77 59 2 1
## 21438 78 59 2 1
## 21439 79 59 2 1
## 21440 80 59 2 1
## 21441 81 59 2 1
## 21442 82 59 2 1
## 21443 83 59 2 1
## 21444 84 59 2 1
## 21445 85 59 2 1
## 21446 86 59 2 1
## 21447 87 59 2 1
## 21448 88 59 2 1
## 21449 89 59 2 1
## 21450 90 59 2 1
## 21451 91 59 2 1
## 21452 92 59 2 1
## 21453 93 59 2 1
## 21454 94 59 2 1
## 21455 95 59 2 1
## 21456 96 59 2 1
## 21457 97 59 2 1
## 21458 98 59 2 1
## 21459 99 59 2 1
## 21460 100 59 2 1
## 21461 101 59 2 1
## 21462 102 59 2 1
## 21463 103 59 2 1
## 21464 104 59 2 1
## 21465 105 59 2 1
## 21466 106 59 2 1
## 21467 107 59 2 1
## 21468 108 59 2 1
## 21469 109 59 2 1
## 21470 110 59 2 1
## 21471 111 59 2 1
## 21472 112 59 2 1
## 21473 113 59 2 1
## 21474 114 59 2 1
## 21475 115 59 2 1
## 21476 116 59 2 1
## 21477 117 59 2 1
## 21478 118 59 2 1
## 21479 119 59 2 1
## 21480 120 59 2 1
## 21481 1 60 2 1
## 21482 2 60 2 1
## 21483 3 60 2 1
## 21484 4 60 2 1
## 21485 5 60 2 1
## 21486 6 60 2 1
## 21487 7 60 2 1
## 21488 8 60 2 1
## 21489 9 60 2 1
## 21490 10 60 2 1
## 21491 11 60 2 1
## 21492 12 60 2 1
## 21493 13 60 2 1
## 21494 14 60 2 1
## 21495 15 60 2 1
## 21496 16 60 2 1
## 21497 17 60 2 1
## 21498 18 60 2 1
## 21499 19 60 2 1
## 21500 20 60 2 1
## 21501 21 60 2 1
## 21502 22 60 2 1
## 21503 23 60 2 1
## 21504 24 60 2 1
## 21505 25 60 2 1
## 21506 26 60 2 1
## 21507 27 60 2 1
## 21508 28 60 2 1
## 21509 29 60 2 1
## 21510 30 60 2 1
## 21511 31 60 2 1
## 21512 32 60 2 1
## 21513 33 60 2 1
## 21514 34 60 2 1
## 21515 35 60 2 1
## 21516 36 60 2 1
## 21517 37 60 2 1
## 21518 38 60 2 1
## 21519 39 60 2 1
## 21520 40 60 2 1
## 21521 41 60 2 1
## 21522 42 60 2 1
## 21523 43 60 2 1
## 21524 44 60 2 1
## 21525 45 60 2 1
## 21526 46 60 2 1
## 21527 47 60 2 1
## 21528 48 60 2 1
## 21529 49 60 2 1
## 21530 50 60 2 1
## 21531 51 60 2 1
## 21532 52 60 2 1
## 21533 53 60 2 1
## 21534 54 60 2 1
## 21535 55 60 2 1
## 21536 56 60 2 1
## 21537 57 60 2 1
## 21538 58 60 2 1
## 21539 59 60 2 1
## 21540 60 60 2 1
## 21541 61 60 2 1
## 21542 62 60 2 1
## 21543 63 60 2 1
## 21544 64 60 2 1
## 21545 65 60 2 1
## 21546 66 60 2 1
## 21547 67 60 2 1
## 21548 68 60 2 1
## 21549 69 60 2 1
## 21550 70 60 2 1
## 21551 71 60 2 1
## 21552 72 60 2 1
## 21553 73 60 2 1
## 21554 74 60 2 1
## 21555 75 60 2 1
## 21556 76 60 2 1
## 21557 77 60 2 1
## 21558 78 60 2 1
## 21559 79 60 2 1
## 21560 80 60 2 1
## 21561 81 60 2 1
## 21562 82 60 2 1
## 21563 83 60 2 1
## 21564 84 60 2 1
## 21565 85 60 2 1
## 21566 86 60 2 1
## 21567 87 60 2 1
## 21568 88 60 2 1
## 21569 89 60 2 1
## 21570 90 60 2 1
## 21571 91 60 2 1
## 21572 92 60 2 1
## 21573 93 60 2 1
## 21574 94 60 2 1
## 21575 95 60 2 1
## 21576 96 60 2 1
## 21577 97 60 2 1
## 21578 98 60 2 1
## 21579 99 60 2 1
## 21580 100 60 2 1
## 21581 101 60 2 1
## 21582 102 60 2 1
## 21583 103 60 2 1
## 21584 104 60 2 1
## 21585 105 60 2 1
## 21586 106 60 2 1
## 21587 107 60 2 1
## 21588 108 60 2 1
## 21589 109 60 2 1
## 21590 110 60 2 1
## 21591 111 60 2 1
## 21592 112 60 2 1
## 21593 113 60 2 1
## 21594 114 60 2 1
## 21595 115 60 2 1
## 21596 116 60 2 1
## 21597 117 60 2 1
## 21598 118 60 2 1
## 21599 119 60 2 1
## 21600 120 60 2 1
## 21601 1 61 2 1
## 21602 2 61 2 1
## 21603 3 61 2 1
## 21604 4 61 2 1
## 21605 5 61 2 1
## 21606 6 61 2 1
## 21607 7 61 2 1
## 21608 8 61 2 1
## 21609 9 61 2 1
## 21610 10 61 2 1
## 21611 11 61 2 1
## 21612 12 61 2 1
## 21613 13 61 2 1
## 21614 14 61 2 1
## 21615 15 61 2 1
## 21616 16 61 2 1
## 21617 17 61 2 1
## 21618 18 61 2 1
## 21619 19 61 2 1
## 21620 20 61 2 1
## 21621 21 61 2 1
## 21622 22 61 2 1
## 21623 23 61 2 1
## 21624 24 61 2 1
## 21625 25 61 2 1
## 21626 26 61 2 1
## 21627 27 61 2 1
## 21628 28 61 2 1
## 21629 29 61 2 1
## 21630 30 61 2 1
## 21631 31 61 2 1
## 21632 32 61 2 1
## 21633 33 61 2 1
## 21634 34 61 2 1
## 21635 35 61 2 1
## 21636 36 61 2 1
## 21637 37 61 2 1
## 21638 38 61 2 1
## 21639 39 61 2 1
## 21640 40 61 2 1
## 21641 41 61 2 1
## 21642 42 61 2 1
## 21643 43 61 2 1
## 21644 44 61 2 1
## 21645 45 61 2 1
## 21646 46 61 2 1
## 21647 47 61 2 1
## 21648 48 61 2 1
## 21649 49 61 2 1
## 21650 50 61 2 1
## 21651 51 61 2 1
## 21652 52 61 2 1
## 21653 53 61 2 1
## 21654 54 61 2 1
## 21655 55 61 2 1
## 21656 56 61 2 1
## 21657 57 61 2 1
## 21658 58 61 2 1
## 21659 59 61 2 1
## 21660 60 61 2 1
## 21661 61 61 2 1
## 21662 62 61 2 1
## 21663 63 61 2 1
## 21664 64 61 2 1
## 21665 65 61 2 1
## 21666 66 61 2 1
## 21667 67 61 2 1
## 21668 68 61 2 1
## 21669 69 61 2 1
## 21670 70 61 2 1
## 21671 71 61 2 1
## 21672 72 61 2 1
## 21673 73 61 2 1
## 21674 74 61 2 1
## 21675 75 61 2 1
## 21676 76 61 2 1
## 21677 77 61 2 1
## 21678 78 61 2 1
## 21679 79 61 2 1
## 21680 80 61 2 1
## 21681 81 61 2 1
## 21682 82 61 2 1
## 21683 83 61 2 1
## 21684 84 61 2 1
## 21685 85 61 2 1
## 21686 86 61 2 1
## 21687 87 61 2 1
## 21688 88 61 2 1
## 21689 89 61 2 1
## 21690 90 61 2 1
## 21691 91 61 2 1
## 21692 92 61 2 1
## 21693 93 61 2 1
## 21694 94 61 2 1
## 21695 95 61 2 1
## 21696 96 61 2 1
## 21697 97 61 2 1
## 21698 98 61 2 1
## 21699 99 61 2 1
## 21700 100 61 2 1
## 21701 101 61 2 1
## 21702 102 61 2 1
## 21703 103 61 2 1
## 21704 104 61 2 1
## 21705 105 61 2 1
## 21706 106 61 2 1
## 21707 107 61 2 1
## 21708 108 61 2 1
## 21709 109 61 2 1
## 21710 110 61 2 1
## 21711 111 61 2 1
## 21712 112 61 2 1
## 21713 113 61 2 1
## 21714 114 61 2 1
## 21715 115 61 2 1
## 21716 116 61 2 1
## 21717 117 61 2 1
## 21718 118 61 2 1
## 21719 119 61 2 1
## 21720 120 61 2 1
## 21721 1 62 2 1
## 21722 2 62 2 1
## 21723 3 62 2 1
## 21724 4 62 2 1
## 21725 5 62 2 1
## 21726 6 62 2 1
## 21727 7 62 2 1
## 21728 8 62 2 1
## 21729 9 62 2 1
## 21730 10 62 2 1
## 21731 11 62 2 1
## 21732 12 62 2 1
## 21733 13 62 2 1
## 21734 14 62 2 1
## 21735 15 62 2 1
## 21736 16 62 2 1
## 21737 17 62 2 1
## 21738 18 62 2 1
## 21739 19 62 2 1
## 21740 20 62 2 1
## 21741 21 62 2 1
## 21742 22 62 2 1
## 21743 23 62 2 1
## 21744 24 62 2 1
## 21745 25 62 2 1
## 21746 26 62 2 1
## 21747 27 62 2 1
## 21748 28 62 2 1
## 21749 29 62 2 1
## 21750 30 62 2 1
## 21751 31 62 2 1
## 21752 32 62 2 1
## 21753 33 62 2 1
## 21754 34 62 2 1
## 21755 35 62 2 1
## 21756 36 62 2 1
## 21757 37 62 2 1
## 21758 38 62 2 1
## 21759 39 62 2 1
## 21760 40 62 2 1
## 21761 41 62 2 1
## 21762 42 62 2 1
## 21763 43 62 2 1
## 21764 44 62 2 1
## 21765 45 62 2 1
## 21766 46 62 2 1
## 21767 47 62 2 1
## 21768 48 62 2 1
## 21769 49 62 2 1
## 21770 50 62 2 1
## 21771 51 62 2 1
## 21772 52 62 2 1
## 21773 53 62 2 1
## 21774 54 62 2 1
## 21775 55 62 2 1
## 21776 56 62 2 1
## 21777 57 62 2 1
## 21778 58 62 2 1
## 21779 59 62 2 1
## 21780 60 62 2 1
## 21781 61 62 2 1
## 21782 62 62 2 1
## 21783 63 62 2 1
## 21784 64 62 2 1
## 21785 65 62 2 1
## 21786 66 62 2 1
## 21787 67 62 2 1
## 21788 68 62 2 1
## 21789 69 62 2 1
## 21790 70 62 2 1
## 21791 71 62 2 1
## 21792 72 62 2 1
## 21793 73 62 2 1
## 21794 74 62 2 1
## 21795 75 62 2 1
## 21796 76 62 2 1
## 21797 77 62 2 1
## 21798 78 62 2 1
## 21799 79 62 2 1
## 21800 80 62 2 1
## 21801 81 62 2 1
## 21802 82 62 2 1
## 21803 83 62 2 1
## 21804 84 62 2 1
## 21805 85 62 2 1
## 21806 86 62 2 1
## 21807 87 62 2 1
## 21808 88 62 2 1
## 21809 89 62 2 1
## 21810 90 62 2 1
## 21811 91 62 2 1
## 21812 92 62 2 1
## 21813 93 62 2 1
## 21814 94 62 2 1
## 21815 95 62 2 1
## 21816 96 62 2 1
## 21817 97 62 2 1
## 21818 98 62 2 1
## 21819 99 62 2 1
## 21820 100 62 2 1
## 21821 101 62 2 1
## 21822 102 62 2 1
## 21823 103 62 2 1
## 21824 104 62 2 1
## 21825 105 62 2 1
## 21826 106 62 2 1
## 21827 107 62 2 1
## 21828 108 62 2 1
## 21829 109 62 2 1
## 21830 110 62 2 1
## 21831 111 62 2 1
## 21832 112 62 2 1
## 21833 113 62 2 1
## 21834 114 62 2 1
## 21835 115 62 2 1
## 21836 116 62 2 1
## 21837 117 62 2 1
## 21838 118 62 2 1
## 21839 119 62 2 1
## 21840 120 62 2 1
## 21841 1 63 2 1
## 21842 2 63 2 1
## 21843 3 63 2 1
## 21844 4 63 2 1
## 21845 5 63 2 1
## 21846 6 63 2 1
## 21847 7 63 2 1
## 21848 8 63 2 1
## 21849 9 63 2 1
## 21850 10 63 2 1
## 21851 11 63 2 1
## 21852 12 63 2 1
## 21853 13 63 2 1
## 21854 14 63 2 1
## 21855 15 63 2 1
## 21856 16 63 2 1
## 21857 17 63 2 1
## 21858 18 63 2 1
## 21859 19 63 2 1
## 21860 20 63 2 1
## 21861 21 63 2 1
## 21862 22 63 2 1
## 21863 23 63 2 1
## 21864 24 63 2 1
## 21865 25 63 2 1
## 21866 26 63 2 1
## 21867 27 63 2 1
## 21868 28 63 2 1
## 21869 29 63 2 1
## 21870 30 63 2 1
## 21871 31 63 2 1
## 21872 32 63 2 1
## 21873 33 63 2 1
## 21874 34 63 2 1
## 21875 35 63 2 1
## 21876 36 63 2 1
## 21877 37 63 2 1
## 21878 38 63 2 1
## 21879 39 63 2 1
## 21880 40 63 2 1
## 21881 41 63 2 1
## 21882 42 63 2 1
## 21883 43 63 2 1
## 21884 44 63 2 1
## 21885 45 63 2 1
## 21886 46 63 2 1
## 21887 47 63 2 1
## 21888 48 63 2 1
## 21889 49 63 2 1
## 21890 50 63 2 1
## 21891 51 63 2 1
## 21892 52 63 2 1
## 21893 53 63 2 1
## 21894 54 63 2 1
## 21895 55 63 2 1
## 21896 56 63 2 1
## 21897 57 63 2 1
## 21898 58 63 2 1
## 21899 59 63 2 1
## 21900 60 63 2 1
## 21901 61 63 2 1
## 21902 62 63 2 1
## 21903 63 63 2 1
## 21904 64 63 2 1
## 21905 65 63 2 1
## 21906 66 63 2 1
## 21907 67 63 2 1
## 21908 68 63 2 1
## 21909 69 63 2 1
## 21910 70 63 2 1
## 21911 71 63 2 1
## 21912 72 63 2 1
## 21913 73 63 2 1
## 21914 74 63 2 1
## 21915 75 63 2 1
## 21916 76 63 2 1
## 21917 77 63 2 1
## 21918 78 63 2 1
## 21919 79 63 2 1
## 21920 80 63 2 1
## 21921 81 63 2 1
## 21922 82 63 2 1
## 21923 83 63 2 1
## 21924 84 63 2 1
## 21925 85 63 2 1
## 21926 86 63 2 1
## 21927 87 63 2 1
## 21928 88 63 2 1
## 21929 89 63 2 1
## 21930 90 63 2 1
## 21931 91 63 2 1
## 21932 92 63 2 1
## 21933 93 63 2 1
## 21934 94 63 2 1
## 21935 95 63 2 1
## 21936 96 63 2 1
## 21937 97 63 2 1
## 21938 98 63 2 1
## 21939 99 63 2 1
## 21940 100 63 2 1
## 21941 101 63 2 1
## 21942 102 63 2 1
## 21943 103 63 2 1
## 21944 104 63 2 1
## 21945 105 63 2 1
## 21946 106 63 2 1
## 21947 107 63 2 1
## 21948 108 63 2 1
## 21949 109 63 2 1
## 21950 110 63 2 1
## 21951 111 63 2 1
## 21952 112 63 2 1
## 21953 113 63 2 1
## 21954 114 63 2 1
## 21955 115 63 2 1
## 21956 116 63 2 1
## 21957 117 63 2 1
## 21958 118 63 2 1
## 21959 119 63 2 1
## 21960 120 63 2 1
## 21961 1 64 2 1
## 21962 2 64 2 1
## 21963 3 64 2 1
## 21964 4 64 2 1
## 21965 5 64 2 1
## 21966 6 64 2 1
## 21967 7 64 2 1
## 21968 8 64 2 1
## 21969 9 64 2 1
## 21970 10 64 2 1
## 21971 11 64 2 1
## 21972 12 64 2 1
## 21973 13 64 2 1
## 21974 14 64 2 1
## 21975 15 64 2 1
## 21976 16 64 2 1
## 21977 17 64 2 1
## 21978 18 64 2 1
## 21979 19 64 2 1
## 21980 20 64 2 1
## 21981 21 64 2 1
## 21982 22 64 2 1
## 21983 23 64 2 1
## 21984 24 64 2 1
## 21985 25 64 2 1
## 21986 26 64 2 1
## 21987 27 64 2 1
## 21988 28 64 2 1
## 21989 29 64 2 1
## 21990 30 64 2 1
## 21991 31 64 2 1
## 21992 32 64 2 1
## 21993 33 64 2 1
## 21994 34 64 2 1
## 21995 35 64 2 1
## 21996 36 64 2 1
## 21997 37 64 2 1
## 21998 38 64 2 1
## 21999 39 64 2 1
## 22000 40 64 2 1
## 22001 41 64 2 1
## 22002 42 64 2 1
## 22003 43 64 2 1
## 22004 44 64 2 1
## 22005 45 64 2 1
## 22006 46 64 2 1
## 22007 47 64 2 1
## 22008 48 64 2 1
## 22009 49 64 2 1
## 22010 50 64 2 1
## 22011 51 64 2 1
## 22012 52 64 2 1
## 22013 53 64 2 1
## 22014 54 64 2 1
## 22015 55 64 2 1
## 22016 56 64 2 1
## 22017 57 64 2 1
## 22018 58 64 2 1
## 22019 59 64 2 1
## 22020 60 64 2 1
## 22021 61 64 2 1
## 22022 62 64 2 1
## 22023 63 64 2 1
## 22024 64 64 2 1
## 22025 65 64 2 1
## 22026 66 64 2 1
## 22027 67 64 2 1
## 22028 68 64 2 1
## 22029 69 64 2 1
## 22030 70 64 2 1
## 22031 71 64 2 1
## 22032 72 64 2 1
## 22033 73 64 2 1
## 22034 74 64 2 1
## 22035 75 64 2 1
## 22036 76 64 2 1
## 22037 77 64 2 1
## 22038 78 64 2 1
## 22039 79 64 2 1
## 22040 80 64 2 1
## 22041 81 64 2 1
## 22042 82 64 2 1
## 22043 83 64 2 1
## 22044 84 64 2 1
## 22045 85 64 2 1
## 22046 86 64 2 1
## 22047 87 64 2 1
## 22048 88 64 2 1
## 22049 89 64 2 1
## 22050 90 64 2 1
## 22051 91 64 2 1
## 22052 92 64 2 1
## 22053 93 64 2 1
## 22054 94 64 2 1
## 22055 95 64 2 1
## 22056 96 64 2 1
## 22057 97 64 2 1
## 22058 98 64 2 1
## 22059 99 64 2 1
## 22060 100 64 2 1
## 22061 101 64 2 1
## 22062 102 64 2 1
## 22063 103 64 2 1
## 22064 104 64 2 1
## 22065 105 64 2 1
## 22066 106 64 2 1
## 22067 107 64 2 1
## 22068 108 64 2 1
## 22069 109 64 2 1
## 22070 110 64 2 1
## 22071 111 64 2 1
## 22072 112 64 2 1
## 22073 113 64 2 1
## 22074 114 64 2 1
## 22075 115 64 2 1
## 22076 116 64 2 1
## 22077 117 64 2 1
## 22078 118 64 2 1
## 22079 119 64 2 1
## 22080 120 64 2 1
## 22081 1 65 2 1
## 22082 2 65 2 1
## 22083 3 65 2 1
## 22084 4 65 2 1
## 22085 5 65 2 1
## 22086 6 65 2 1
## 22087 7 65 2 1
## 22088 8 65 2 1
## 22089 9 65 2 1
## 22090 10 65 2 1
## 22091 11 65 2 1
## 22092 12 65 2 1
## 22093 13 65 2 1
## 22094 14 65 2 1
## 22095 15 65 2 1
## 22096 16 65 2 1
## 22097 17 65 2 1
## 22098 18 65 2 1
## 22099 19 65 2 1
## 22100 20 65 2 1
## 22101 21 65 2 1
## 22102 22 65 2 1
## 22103 23 65 2 1
## 22104 24 65 2 1
## 22105 25 65 2 1
## 22106 26 65 2 1
## 22107 27 65 2 1
## 22108 28 65 2 1
## 22109 29 65 2 1
## 22110 30 65 2 1
## 22111 31 65 2 1
## 22112 32 65 2 1
## 22113 33 65 2 1
## 22114 34 65 2 1
## 22115 35 65 2 1
## 22116 36 65 2 1
## 22117 37 65 2 1
## 22118 38 65 2 1
## 22119 39 65 2 1
## 22120 40 65 2 1
## 22121 41 65 2 1
## 22122 42 65 2 1
## 22123 43 65 2 1
## 22124 44 65 2 1
## 22125 45 65 2 1
## 22126 46 65 2 1
## 22127 47 65 2 1
## 22128 48 65 2 1
## 22129 49 65 2 1
## 22130 50 65 2 1
## 22131 51 65 2 1
## 22132 52 65 2 1
## 22133 53 65 2 1
## 22134 54 65 2 1
## 22135 55 65 2 1
## 22136 56 65 2 1
## 22137 57 65 2 1
## 22138 58 65 2 1
## 22139 59 65 2 1
## 22140 60 65 2 1
## 22141 61 65 2 1
## 22142 62 65 2 1
## 22143 63 65 2 1
## 22144 64 65 2 1
## 22145 65 65 2 1
## 22146 66 65 2 1
## 22147 67 65 2 1
## 22148 68 65 2 1
## 22149 69 65 2 1
## 22150 70 65 2 1
## 22151 71 65 2 1
## 22152 72 65 2 1
## 22153 73 65 2 1
## 22154 74 65 2 1
## 22155 75 65 2 1
## 22156 76 65 2 1
## 22157 77 65 2 1
## 22158 78 65 2 1
## 22159 79 65 2 1
## 22160 80 65 2 1
## 22161 81 65 2 1
## 22162 82 65 2 1
## 22163 83 65 2 1
## 22164 84 65 2 1
## 22165 85 65 2 1
## 22166 86 65 2 1
## 22167 87 65 2 1
## 22168 88 65 2 1
## 22169 89 65 2 1
## 22170 90 65 2 1
## 22171 91 65 2 1
## 22172 92 65 2 1
## 22173 93 65 2 1
## 22174 94 65 2 1
## 22175 95 65 2 1
## 22176 96 65 2 1
## 22177 97 65 2 1
## 22178 98 65 2 1
## 22179 99 65 2 1
## 22180 100 65 2 1
## 22181 101 65 2 1
## 22182 102 65 2 1
## 22183 103 65 2 1
## 22184 104 65 2 1
## 22185 105 65 2 1
## 22186 106 65 2 1
## 22187 107 65 2 1
## 22188 108 65 2 1
## 22189 109 65 2 1
## 22190 110 65 2 1
## 22191 111 65 2 1
## 22192 112 65 2 1
## 22193 113 65 2 1
## 22194 114 65 2 1
## 22195 115 65 2 1
## 22196 116 65 2 1
## 22197 117 65 2 1
## 22198 118 65 2 1
## 22199 119 65 2 1
## 22200 120 65 2 1
## 22201 1 66 2 1
## 22202 2 66 2 1
## 22203 3 66 2 1
## 22204 4 66 2 1
## 22205 5 66 2 1
## 22206 6 66 2 1
## 22207 7 66 2 1
## 22208 8 66 2 1
## 22209 9 66 2 1
## 22210 10 66 2 1
## 22211 11 66 2 1
## 22212 12 66 2 1
## 22213 13 66 2 1
## 22214 14 66 2 1
## 22215 15 66 2 1
## 22216 16 66 2 1
## 22217 17 66 2 1
## 22218 18 66 2 1
## 22219 19 66 2 1
## 22220 20 66 2 1
## 22221 21 66 2 1
## 22222 22 66 2 1
## 22223 23 66 2 1
## 22224 24 66 2 1
## 22225 25 66 2 1
## 22226 26 66 2 1
## 22227 27 66 2 1
## 22228 28 66 2 1
## 22229 29 66 2 1
## 22230 30 66 2 1
## 22231 31 66 2 1
## 22232 32 66 2 1
## 22233 33 66 2 1
## 22234 34 66 2 1
## 22235 35 66 2 1
## 22236 36 66 2 1
## 22237 37 66 2 1
## 22238 38 66 2 1
## 22239 39 66 2 1
## 22240 40 66 2 1
## 22241 41 66 2 1
## 22242 42 66 2 1
## 22243 43 66 2 1
## 22244 44 66 2 1
## 22245 45 66 2 1
## 22246 46 66 2 1
## 22247 47 66 2 1
## 22248 48 66 2 1
## 22249 49 66 2 1
## 22250 50 66 2 1
## 22251 51 66 2 1
## 22252 52 66 2 1
## 22253 53 66 2 1
## 22254 54 66 2 1
## 22255 55 66 2 1
## 22256 56 66 2 1
## 22257 57 66 2 1
## 22258 58 66 2 1
## 22259 59 66 2 1
## 22260 60 66 2 1
## 22261 61 66 2 1
## 22262 62 66 2 1
## 22263 63 66 2 1
## 22264 64 66 2 1
## 22265 65 66 2 1
## 22266 66 66 2 1
## 22267 67 66 2 1
## 22268 68 66 2 1
## 22269 69 66 2 1
## 22270 70 66 2 1
## 22271 71 66 2 1
## 22272 72 66 2 1
## 22273 73 66 2 1
## 22274 74 66 2 1
## 22275 75 66 2 1
## 22276 76 66 2 1
## 22277 77 66 2 1
## 22278 78 66 2 1
## 22279 79 66 2 1
## 22280 80 66 2 1
## 22281 81 66 2 1
## 22282 82 66 2 1
## 22283 83 66 2 1
## 22284 84 66 2 1
## 22285 85 66 2 1
## 22286 86 66 2 1
## 22287 87 66 2 1
## 22288 88 66 2 1
## 22289 89 66 2 1
## 22290 90 66 2 1
## 22291 91 66 2 1
## 22292 92 66 2 1
## 22293 93 66 2 1
## 22294 94 66 2 1
## 22295 95 66 2 1
## 22296 96 66 2 1
## 22297 97 66 2 1
## 22298 98 66 2 1
## 22299 99 66 2 1
## 22300 100 66 2 1
## 22301 101 66 2 1
## 22302 102 66 2 1
## 22303 103 66 2 1
## 22304 104 66 2 1
## 22305 105 66 2 1
## 22306 106 66 2 1
## 22307 107 66 2 1
## 22308 108 66 2 1
## 22309 109 66 2 1
## 22310 110 66 2 1
## 22311 111 66 2 1
## 22312 112 66 2 1
## 22313 113 66 2 1
## 22314 114 66 2 1
## 22315 115 66 2 1
## 22316 116 66 2 1
## 22317 117 66 2 1
## 22318 118 66 2 1
## 22319 119 66 2 1
## 22320 120 66 2 1
## 22321 1 67 2 1
## 22322 2 67 2 1
## 22323 3 67 2 1
## 22324 4 67 2 1
## 22325 5 67 2 1
## 22326 6 67 2 1
## 22327 7 67 2 1
## 22328 8 67 2 1
## 22329 9 67 2 1
## 22330 10 67 2 1
## 22331 11 67 2 1
## 22332 12 67 2 1
## 22333 13 67 2 1
## 22334 14 67 2 1
## 22335 15 67 2 1
## 22336 16 67 2 1
## 22337 17 67 2 1
## 22338 18 67 2 1
## 22339 19 67 2 1
## 22340 20 67 2 1
## 22341 21 67 2 1
## 22342 22 67 2 1
## 22343 23 67 2 1
## 22344 24 67 2 1
## 22345 25 67 2 1
## 22346 26 67 2 1
## 22347 27 67 2 1
## 22348 28 67 2 1
## 22349 29 67 2 1
## 22350 30 67 2 1
## 22351 31 67 2 1
## 22352 32 67 2 1
## 22353 33 67 2 1
## 22354 34 67 2 1
## 22355 35 67 2 1
## 22356 36 67 2 1
## 22357 37 67 2 1
## 22358 38 67 2 1
## 22359 39 67 2 1
## 22360 40 67 2 1
## 22361 41 67 2 1
## 22362 42 67 2 1
## 22363 43 67 2 1
## 22364 44 67 2 1
## 22365 45 67 2 1
## 22366 46 67 2 1
## 22367 47 67 2 1
## 22368 48 67 2 1
## 22369 49 67 2 1
## 22370 50 67 2 1
## 22371 51 67 2 1
## 22372 52 67 2 1
## 22373 53 67 2 1
## 22374 54 67 2 1
## 22375 55 67 2 1
## 22376 56 67 2 1
## 22377 57 67 2 1
## 22378 58 67 2 1
## 22379 59 67 2 1
## 22380 60 67 2 1
## 22381 61 67 2 1
## 22382 62 67 2 1
## 22383 63 67 2 1
## 22384 64 67 2 1
## 22385 65 67 2 1
## 22386 66 67 2 1
## 22387 67 67 2 1
## 22388 68 67 2 1
## 22389 69 67 2 1
## 22390 70 67 2 1
## 22391 71 67 2 1
## 22392 72 67 2 1
## 22393 73 67 2 1
## 22394 74 67 2 1
## 22395 75 67 2 1
## 22396 76 67 2 1
## 22397 77 67 2 1
## 22398 78 67 2 1
## 22399 79 67 2 1
## 22400 80 67 2 1
## 22401 81 67 2 1
## 22402 82 67 2 1
## 22403 83 67 2 1
## 22404 84 67 2 1
## 22405 85 67 2 1
## 22406 86 67 2 1
## 22407 87 67 2 1
## 22408 88 67 2 1
## 22409 89 67 2 1
## 22410 90 67 2 1
## 22411 91 67 2 1
## 22412 92 67 2 1
## 22413 93 67 2 1
## 22414 94 67 2 1
## 22415 95 67 2 1
## 22416 96 67 2 1
## 22417 97 67 2 1
## 22418 98 67 2 1
## 22419 99 67 2 1
## 22420 100 67 2 1
## 22421 101 67 2 1
## 22422 102 67 2 1
## 22423 103 67 2 1
## 22424 104 67 2 1
## 22425 105 67 2 1
## 22426 106 67 2 1
## 22427 107 67 2 1
## 22428 108 67 2 1
## 22429 109 67 2 1
## 22430 110 67 2 1
## 22431 111 67 2 1
## 22432 112 67 2 1
## 22433 113 67 2 1
## 22434 114 67 2 1
## 22435 115 67 2 1
## 22436 116 67 2 1
## 22437 117 67 2 1
## 22438 118 67 2 1
## 22439 119 67 2 1
## 22440 120 67 2 1
## 22441 1 68 2 1
## 22442 2 68 2 1
## 22443 3 68 2 1
## 22444 4 68 2 1
## 22445 5 68 2 1
## 22446 6 68 2 1
## 22447 7 68 2 1
## 22448 8 68 2 1
## 22449 9 68 2 1
## 22450 10 68 2 1
## 22451 11 68 2 1
## 22452 12 68 2 1
## 22453 13 68 2 1
## 22454 14 68 2 1
## 22455 15 68 2 1
## 22456 16 68 2 1
## 22457 17 68 2 1
## 22458 18 68 2 1
## 22459 19 68 2 1
## 22460 20 68 2 1
## 22461 21 68 2 1
## 22462 22 68 2 1
## 22463 23 68 2 1
## 22464 24 68 2 1
## 22465 25 68 2 1
## 22466 26 68 2 1
## 22467 27 68 2 1
## 22468 28 68 2 1
## 22469 29 68 2 1
## 22470 30 68 2 1
## 22471 31 68 2 1
## 22472 32 68 2 1
## 22473 33 68 2 1
## 22474 34 68 2 1
## 22475 35 68 2 1
## 22476 36 68 2 1
## 22477 37 68 2 1
## 22478 38 68 2 1
## 22479 39 68 2 1
## 22480 40 68 2 1
## 22481 41 68 2 1
## 22482 42 68 2 1
## 22483 43 68 2 1
## 22484 44 68 2 1
## 22485 45 68 2 1
## 22486 46 68 2 1
## 22487 47 68 2 1
## 22488 48 68 2 1
## 22489 49 68 2 1
## 22490 50 68 2 1
## 22491 51 68 2 1
## 22492 52 68 2 1
## 22493 53 68 2 1
## 22494 54 68 2 1
## 22495 55 68 2 1
## 22496 56 68 2 1
## 22497 57 68 2 1
## 22498 58 68 2 1
## 22499 59 68 2 1
## 22500 60 68 2 1
## 22501 61 68 2 1
## 22502 62 68 2 1
## 22503 63 68 2 1
## 22504 64 68 2 1
## 22505 65 68 2 1
## 22506 66 68 2 1
## 22507 67 68 2 1
## 22508 68 68 2 1
## 22509 69 68 2 1
## 22510 70 68 2 1
## 22511 71 68 2 1
## 22512 72 68 2 1
## 22513 73 68 2 1
## 22514 74 68 2 1
## 22515 75 68 2 1
## 22516 76 68 2 1
## 22517 77 68 2 1
## 22518 78 68 2 1
## 22519 79 68 2 1
## 22520 80 68 2 1
## 22521 81 68 2 1
## 22522 82 68 2 1
## 22523 83 68 2 1
## 22524 84 68 2 1
## 22525 85 68 2 1
## 22526 86 68 2 1
## 22527 87 68 2 1
## 22528 88 68 2 1
## 22529 89 68 2 1
## 22530 90 68 2 1
## 22531 91 68 2 1
## 22532 92 68 2 1
## 22533 93 68 2 1
## 22534 94 68 2 1
## 22535 95 68 2 1
## 22536 96 68 2 1
## 22537 97 68 2 1
## 22538 98 68 2 1
## 22539 99 68 2 1
## 22540 100 68 2 1
## 22541 101 68 2 1
## 22542 102 68 2 1
## 22543 103 68 2 1
## 22544 104 68 2 1
## 22545 105 68 2 1
## 22546 106 68 2 1
## 22547 107 68 2 1
## 22548 108 68 2 1
## 22549 109 68 2 1
## 22550 110 68 2 1
## 22551 111 68 2 1
## 22552 112 68 2 1
## 22553 113 68 2 1
## 22554 114 68 2 1
## 22555 115 68 2 1
## 22556 116 68 2 1
## 22557 117 68 2 1
## 22558 118 68 2 1
## 22559 119 68 2 1
## 22560 120 68 2 1
## 22561 1 69 2 1
## 22562 2 69 2 1
## 22563 3 69 2 1
## 22564 4 69 2 1
## 22565 5 69 2 1
## 22566 6 69 2 1
## 22567 7 69 2 1
## 22568 8 69 2 1
## 22569 9 69 2 1
## 22570 10 69 2 1
## 22571 11 69 2 1
## 22572 12 69 2 1
## 22573 13 69 2 1
## 22574 14 69 2 1
## 22575 15 69 2 1
## 22576 16 69 2 1
## 22577 17 69 2 1
## 22578 18 69 2 1
## 22579 19 69 2 1
## 22580 20 69 2 1
## 22581 21 69 2 1
## 22582 22 69 2 1
## 22583 23 69 2 1
## 22584 24 69 2 1
## 22585 25 69 2 1
## 22586 26 69 2 1
## 22587 27 69 2 1
## 22588 28 69 2 1
## 22589 29 69 2 1
## 22590 30 69 2 1
## 22591 31 69 2 1
## 22592 32 69 2 1
## 22593 33 69 2 1
## 22594 34 69 2 1
## 22595 35 69 2 1
## 22596 36 69 2 1
## 22597 37 69 2 1
## 22598 38 69 2 1
## 22599 39 69 2 1
## 22600 40 69 2 1
## 22601 41 69 2 1
## 22602 42 69 2 1
## 22603 43 69 2 1
## 22604 44 69 2 1
## 22605 45 69 2 1
## 22606 46 69 2 1
## 22607 47 69 2 1
## 22608 48 69 2 1
## 22609 49 69 2 1
## 22610 50 69 2 1
## 22611 51 69 2 1
## 22612 52 69 2 1
## 22613 53 69 2 1
## 22614 54 69 2 1
## 22615 55 69 2 1
## 22616 56 69 2 1
## 22617 57 69 2 1
## 22618 58 69 2 1
## 22619 59 69 2 1
## 22620 60 69 2 1
## 22621 61 69 2 1
## 22622 62 69 2 1
## 22623 63 69 2 1
## 22624 64 69 2 1
## 22625 65 69 2 1
## 22626 66 69 2 1
## 22627 67 69 2 1
## 22628 68 69 2 1
## 22629 69 69 2 1
## 22630 70 69 2 1
## 22631 71 69 2 1
## 22632 72 69 2 1
## 22633 73 69 2 1
## 22634 74 69 2 1
## 22635 75 69 2 1
## 22636 76 69 2 1
## 22637 77 69 2 1
## 22638 78 69 2 1
## 22639 79 69 2 1
## 22640 80 69 2 1
## 22641 81 69 2 1
## 22642 82 69 2 1
## 22643 83 69 2 1
## 22644 84 69 2 1
## 22645 85 69 2 1
## 22646 86 69 2 1
## 22647 87 69 2 1
## 22648 88 69 2 1
## 22649 89 69 2 1
## 22650 90 69 2 1
## 22651 91 69 2 1
## 22652 92 69 2 1
## 22653 93 69 2 1
## 22654 94 69 2 1
## 22655 95 69 2 1
## 22656 96 69 2 1
## 22657 97 69 2 1
## 22658 98 69 2 1
## 22659 99 69 2 1
## 22660 100 69 2 1
## 22661 101 69 2 1
## 22662 102 69 2 1
## 22663 103 69 2 1
## 22664 104 69 2 1
## 22665 105 69 2 1
## 22666 106 69 2 1
## 22667 107 69 2 1
## 22668 108 69 2 1
## 22669 109 69 2 1
## 22670 110 69 2 1
## 22671 111 69 2 1
## 22672 112 69 2 1
## 22673 113 69 2 1
## 22674 114 69 2 1
## 22675 115 69 2 1
## 22676 116 69 2 1
## 22677 117 69 2 1
## 22678 118 69 2 1
## 22679 119 69 2 1
## 22680 120 69 2 1
## 22681 1 70 2 1
## 22682 2 70 2 1
## 22683 3 70 2 1
## 22684 4 70 2 1
## 22685 5 70 2 1
## 22686 6 70 2 1
## 22687 7 70 2 1
## 22688 8 70 2 1
## 22689 9 70 2 1
## 22690 10 70 2 1
## 22691 11 70 2 1
## 22692 12 70 2 1
## 22693 13 70 2 1
## 22694 14 70 2 1
## 22695 15 70 2 1
## 22696 16 70 2 1
## 22697 17 70 2 1
## 22698 18 70 2 1
## 22699 19 70 2 1
## 22700 20 70 2 1
## 22701 21 70 2 1
## 22702 22 70 2 1
## 22703 23 70 2 1
## 22704 24 70 2 1
## 22705 25 70 2 1
## 22706 26 70 2 1
## 22707 27 70 2 1
## 22708 28 70 2 1
## 22709 29 70 2 1
## 22710 30 70 2 1
## 22711 31 70 2 1
## 22712 32 70 2 1
## 22713 33 70 2 1
## 22714 34 70 2 1
## 22715 35 70 2 1
## 22716 36 70 2 1
## 22717 37 70 2 1
## 22718 38 70 2 1
## 22719 39 70 2 1
## 22720 40 70 2 1
## 22721 41 70 2 1
## 22722 42 70 2 1
## 22723 43 70 2 1
## 22724 44 70 2 1
## 22725 45 70 2 1
## 22726 46 70 2 1
## 22727 47 70 2 1
## 22728 48 70 2 1
## 22729 49 70 2 1
## 22730 50 70 2 1
## 22731 51 70 2 1
## 22732 52 70 2 1
## 22733 53 70 2 1
## 22734 54 70 2 1
## 22735 55 70 2 1
## 22736 56 70 2 1
## 22737 57 70 2 1
## 22738 58 70 2 1
## 22739 59 70 2 1
## 22740 60 70 2 1
## 22741 61 70 2 1
## 22742 62 70 2 1
## 22743 63 70 2 1
## 22744 64 70 2 1
## 22745 65 70 2 1
## 22746 66 70 2 1
## 22747 67 70 2 1
## 22748 68 70 2 1
## 22749 69 70 2 1
## 22750 70 70 2 1
## 22751 71 70 2 1
## 22752 72 70 2 1
## 22753 73 70 2 1
## 22754 74 70 2 1
## 22755 75 70 2 1
## 22756 76 70 2 1
## 22757 77 70 2 1
## 22758 78 70 2 1
## 22759 79 70 2 1
## 22760 80 70 2 1
## 22761 81 70 2 1
## 22762 82 70 2 1
## 22763 83 70 2 1
## 22764 84 70 2 1
## 22765 85 70 2 1
## 22766 86 70 2 1
## 22767 87 70 2 1
## 22768 88 70 2 1
## 22769 89 70 2 1
## 22770 90 70 2 1
## 22771 91 70 2 1
## 22772 92 70 2 1
## 22773 93 70 2 1
## 22774 94 70 2 1
## 22775 95 70 2 1
## 22776 96 70 2 1
## 22777 97 70 2 1
## 22778 98 70 2 1
## 22779 99 70 2 1
## 22780 100 70 2 1
## 22781 101 70 2 1
## 22782 102 70 2 1
## 22783 103 70 2 1
## 22784 104 70 2 1
## 22785 105 70 2 1
## 22786 106 70 2 1
## 22787 107 70 2 1
## 22788 108 70 2 1
## 22789 109 70 2 1
## 22790 110 70 2 1
## 22791 111 70 2 1
## 22792 112 70 2 1
## 22793 113 70 2 1
## 22794 114 70 2 1
## 22795 115 70 2 1
## 22796 116 70 2 1
## 22797 117 70 2 1
## 22798 118 70 2 1
## 22799 119 70 2 1
## 22800 120 70 2 1
## 22801 1 71 2 1
## 22802 2 71 2 1
## 22803 3 71 2 1
## 22804 4 71 2 1
## 22805 5 71 2 1
## 22806 6 71 2 1
## 22807 7 71 2 1
## 22808 8 71 2 1
## 22809 9 71 2 1
## 22810 10 71 2 1
## 22811 11 71 2 1
## 22812 12 71 2 1
## 22813 13 71 2 1
## 22814 14 71 2 1
## 22815 15 71 2 1
## 22816 16 71 2 1
## 22817 17 71 2 1
## 22818 18 71 2 1
## 22819 19 71 2 1
## 22820 20 71 2 1
## 22821 21 71 2 1
## 22822 22 71 2 1
## 22823 23 71 2 1
## 22824 24 71 2 1
## 22825 25 71 2 1
## 22826 26 71 2 1
## 22827 27 71 2 1
## 22828 28 71 2 1
## 22829 29 71 2 1
## 22830 30 71 2 1
## 22831 31 71 2 1
## 22832 32 71 2 1
## 22833 33 71 2 1
## 22834 34 71 2 1
## 22835 35 71 2 1
## 22836 36 71 2 1
## 22837 37 71 2 1
## 22838 38 71 2 1
## 22839 39 71 2 1
## 22840 40 71 2 1
## 22841 41 71 2 1
## 22842 42 71 2 1
## 22843 43 71 2 1
## 22844 44 71 2 1
## 22845 45 71 2 1
## 22846 46 71 2 1
## 22847 47 71 2 1
## 22848 48 71 2 1
## 22849 49 71 2 1
## 22850 50 71 2 1
## 22851 51 71 2 1
## 22852 52 71 2 1
## 22853 53 71 2 1
## 22854 54 71 2 1
## 22855 55 71 2 1
## 22856 56 71 2 1
## 22857 57 71 2 1
## 22858 58 71 2 1
## 22859 59 71 2 1
## 22860 60 71 2 1
## 22861 61 71 2 1
## 22862 62 71 2 1
## 22863 63 71 2 1
## 22864 64 71 2 1
## 22865 65 71 2 1
## 22866 66 71 2 1
## 22867 67 71 2 1
## 22868 68 71 2 1
## 22869 69 71 2 1
## 22870 70 71 2 1
## 22871 71 71 2 1
## 22872 72 71 2 1
## 22873 73 71 2 1
## 22874 74 71 2 1
## 22875 75 71 2 1
## 22876 76 71 2 1
## 22877 77 71 2 1
## 22878 78 71 2 1
## 22879 79 71 2 1
## 22880 80 71 2 1
## 22881 81 71 2 1
## 22882 82 71 2 1
## 22883 83 71 2 1
## 22884 84 71 2 1
## 22885 85 71 2 1
## 22886 86 71 2 1
## 22887 87 71 2 1
## 22888 88 71 2 1
## 22889 89 71 2 1
## 22890 90 71 2 1
## 22891 91 71 2 1
## 22892 92 71 2 1
## 22893 93 71 2 1
## 22894 94 71 2 1
## 22895 95 71 2 1
## 22896 96 71 2 1
## 22897 97 71 2 1
## 22898 98 71 2 1
## 22899 99 71 2 1
## 22900 100 71 2 1
## 22901 101 71 2 1
## 22902 102 71 2 1
## 22903 103 71 2 1
## 22904 104 71 2 1
## 22905 105 71 2 1
## 22906 106 71 2 1
## 22907 107 71 2 1
## 22908 108 71 2 1
## 22909 109 71 2 1
## 22910 110 71 2 1
## 22911 111 71 2 1
## 22912 112 71 2 1
## 22913 113 71 2 1
## 22914 114 71 2 1
## 22915 115 71 2 1
## 22916 116 71 2 1
## 22917 117 71 2 1
## 22918 118 71 2 1
## 22919 119 71 2 1
## 22920 120 71 2 1
## 22921 1 72 2 1
## 22922 2 72 2 1
## 22923 3 72 2 1
## 22924 4 72 2 1
## 22925 5 72 2 1
## 22926 6 72 2 1
## 22927 7 72 2 1
## 22928 8 72 2 1
## 22929 9 72 2 1
## 22930 10 72 2 1
## 22931 11 72 2 1
## 22932 12 72 2 1
## 22933 13 72 2 1
## 22934 14 72 2 1
## 22935 15 72 2 1
## 22936 16 72 2 1
## 22937 17 72 2 1
## 22938 18 72 2 1
## 22939 19 72 2 1
## 22940 20 72 2 1
## 22941 21 72 2 1
## 22942 22 72 2 1
## 22943 23 72 2 1
## 22944 24 72 2 1
## 22945 25 72 2 1
## 22946 26 72 2 1
## 22947 27 72 2 1
## 22948 28 72 2 1
## 22949 29 72 2 1
## 22950 30 72 2 1
## 22951 31 72 2 1
## 22952 32 72 2 1
## 22953 33 72 2 1
## 22954 34 72 2 1
## 22955 35 72 2 1
## 22956 36 72 2 1
## 22957 37 72 2 1
## 22958 38 72 2 1
## 22959 39 72 2 1
## 22960 40 72 2 1
## 22961 41 72 2 1
## 22962 42 72 2 1
## 22963 43 72 2 1
## 22964 44 72 2 1
## 22965 45 72 2 1
## 22966 46 72 2 1
## 22967 47 72 2 1
## 22968 48 72 2 1
## 22969 49 72 2 1
## 22970 50 72 2 1
## 22971 51 72 2 1
## 22972 52 72 2 1
## 22973 53 72 2 1
## 22974 54 72 2 1
## 22975 55 72 2 1
## 22976 56 72 2 1
## 22977 57 72 2 1
## 22978 58 72 2 1
## 22979 59 72 2 1
## 22980 60 72 2 1
## 22981 61 72 2 1
## 22982 62 72 2 1
## 22983 63 72 2 1
## 22984 64 72 2 1
## 22985 65 72 2 1
## 22986 66 72 2 1
## 22987 67 72 2 1
## 22988 68 72 2 1
## 22989 69 72 2 1
## 22990 70 72 2 1
## 22991 71 72 2 1
## 22992 72 72 2 1
## 22993 73 72 2 1
## 22994 74 72 2 1
## 22995 75 72 2 1
## 22996 76 72 2 1
## 22997 77 72 2 1
## 22998 78 72 2 1
## 22999 79 72 2 1
## 23000 80 72 2 1
## 23001 81 72 2 1
## 23002 82 72 2 1
## 23003 83 72 2 1
## 23004 84 72 2 1
## 23005 85 72 2 1
## 23006 86 72 2 1
## 23007 87 72 2 1
## 23008 88 72 2 1
## 23009 89 72 2 1
## 23010 90 72 2 1
## 23011 91 72 2 1
## 23012 92 72 2 1
## 23013 93 72 2 1
## 23014 94 72 2 1
## 23015 95 72 2 1
## 23016 96 72 2 1
## 23017 97 72 2 1
## 23018 98 72 2 1
## 23019 99 72 2 1
## 23020 100 72 2 1
## 23021 101 72 2 1
## 23022 102 72 2 1
## 23023 103 72 2 1
## 23024 104 72 2 1
## 23025 105 72 2 1
## 23026 106 72 2 1
## 23027 107 72 2 1
## 23028 108 72 2 1
## 23029 109 72 2 1
## 23030 110 72 2 1
## 23031 111 72 2 1
## 23032 112 72 2 1
## 23033 113 72 2 1
## 23034 114 72 2 1
## 23035 115 72 2 1
## 23036 116 72 2 1
## 23037 117 72 2 1
## 23038 118 72 2 1
## 23039 119 72 2 1
## 23040 120 72 2 1
## 23041 1 73 2 1
## 23042 2 73 2 1
## 23043 3 73 2 1
## 23044 4 73 2 1
## 23045 5 73 2 1
## 23046 6 73 2 1
## 23047 7 73 2 1
## 23048 8 73 2 1
## 23049 9 73 2 1
## 23050 10 73 2 1
## 23051 11 73 2 1
## 23052 12 73 2 1
## 23053 13 73 2 1
## 23054 14 73 2 1
## 23055 15 73 2 1
## 23056 16 73 2 1
## 23057 17 73 2 1
## 23058 18 73 2 1
## 23059 19 73 2 1
## 23060 20 73 2 1
## 23061 21 73 2 1
## 23062 22 73 2 1
## 23063 23 73 2 1
## 23064 24 73 2 1
## 23065 25 73 2 1
## 23066 26 73 2 1
## 23067 27 73 2 1
## 23068 28 73 2 1
## 23069 29 73 2 1
## 23070 30 73 2 1
## 23071 31 73 2 1
## 23072 32 73 2 1
## 23073 33 73 2 1
## 23074 34 73 2 1
## 23075 35 73 2 1
## 23076 36 73 2 1
## 23077 37 73 2 1
## 23078 38 73 2 1
## 23079 39 73 2 1
## 23080 40 73 2 1
## 23081 41 73 2 1
## 23082 42 73 2 1
## 23083 43 73 2 1
## 23084 44 73 2 1
## 23085 45 73 2 1
## 23086 46 73 2 1
## 23087 47 73 2 1
## 23088 48 73 2 1
## 23089 49 73 2 1
## 23090 50 73 2 1
## 23091 51 73 2 1
## 23092 52 73 2 1
## 23093 53 73 2 1
## 23094 54 73 2 1
## 23095 55 73 2 1
## 23096 56 73 2 1
## 23097 57 73 2 1
## 23098 58 73 2 1
## 23099 59 73 2 1
## 23100 60 73 2 1
## 23101 61 73 2 1
## 23102 62 73 2 1
## 23103 63 73 2 1
## 23104 64 73 2 1
## 23105 65 73 2 1
## 23106 66 73 2 1
## 23107 67 73 2 1
## 23108 68 73 2 1
## 23109 69 73 2 1
## 23110 70 73 2 1
## 23111 71 73 2 1
## 23112 72 73 2 1
## 23113 73 73 2 1
## 23114 74 73 2 1
## 23115 75 73 2 1
## 23116 76 73 2 1
## 23117 77 73 2 1
## 23118 78 73 2 1
## 23119 79 73 2 1
## 23120 80 73 2 1
## 23121 81 73 2 1
## 23122 82 73 2 1
## 23123 83 73 2 1
## 23124 84 73 2 1
## 23125 85 73 2 1
## 23126 86 73 2 1
## 23127 87 73 2 1
## 23128 88 73 2 1
## 23129 89 73 2 1
## 23130 90 73 2 1
## 23131 91 73 2 1
## 23132 92 73 2 1
## 23133 93 73 2 1
## 23134 94 73 2 1
## 23135 95 73 2 1
## 23136 96 73 2 1
## 23137 97 73 2 1
## 23138 98 73 2 1
## 23139 99 73 2 1
## 23140 100 73 2 1
## 23141 101 73 2 1
## 23142 102 73 2 1
## 23143 103 73 2 1
## 23144 104 73 2 1
## 23145 105 73 2 1
## 23146 106 73 2 1
## 23147 107 73 2 1
## 23148 108 73 2 1
## 23149 109 73 2 1
## 23150 110 73 2 1
## 23151 111 73 2 1
## 23152 112 73 2 1
## 23153 113 73 2 1
## 23154 114 73 2 1
## 23155 115 73 2 1
## 23156 116 73 2 1
## 23157 117 73 2 1
## 23158 118 73 2 1
## 23159 119 73 2 1
## 23160 120 73 2 1
## 23161 1 74 2 1
## 23162 2 74 2 1
## 23163 3 74 2 1
## 23164 4 74 2 1
## 23165 5 74 2 1
## 23166 6 74 2 1
## 23167 7 74 2 1
## 23168 8 74 2 1
## 23169 9 74 2 1
## 23170 10 74 2 1
## 23171 11 74 2 1
## 23172 12 74 2 1
## 23173 13 74 2 1
## 23174 14 74 2 1
## 23175 15 74 2 1
## 23176 16 74 2 1
## 23177 17 74 2 1
## 23178 18 74 2 1
## 23179 19 74 2 1
## 23180 20 74 2 1
## 23181 21 74 2 1
## 23182 22 74 2 1
## 23183 23 74 2 1
## 23184 24 74 2 1
## 23185 25 74 2 1
## 23186 26 74 2 1
## 23187 27 74 2 1
## 23188 28 74 2 1
## 23189 29 74 2 1
## 23190 30 74 2 1
## 23191 31 74 2 1
## 23192 32 74 2 1
## 23193 33 74 2 1
## 23194 34 74 2 1
## 23195 35 74 2 1
## 23196 36 74 2 1
## 23197 37 74 2 1
## 23198 38 74 2 1
## 23199 39 74 2 1
## 23200 40 74 2 1
## 23201 41 74 2 1
## 23202 42 74 2 1
## 23203 43 74 2 1
## 23204 44 74 2 1
## 23205 45 74 2 1
## 23206 46 74 2 1
## 23207 47 74 2 1
## 23208 48 74 2 1
## 23209 49 74 2 1
## 23210 50 74 2 1
## 23211 51 74 2 1
## 23212 52 74 2 1
## 23213 53 74 2 1
## 23214 54 74 2 1
## 23215 55 74 2 1
## 23216 56 74 2 1
## 23217 57 74 2 1
## 23218 58 74 2 1
## 23219 59 74 2 1
## 23220 60 74 2 1
## 23221 61 74 2 1
## 23222 62 74 2 1
## 23223 63 74 2 1
## 23224 64 74 2 1
## 23225 65 74 2 1
## 23226 66 74 2 1
## 23227 67 74 2 1
## 23228 68 74 2 1
## 23229 69 74 2 1
## 23230 70 74 2 1
## 23231 71 74 2 1
## 23232 72 74 2 1
## 23233 73 74 2 1
## 23234 74 74 2 1
## 23235 75 74 2 1
## 23236 76 74 2 1
## 23237 77 74 2 1
## 23238 78 74 2 1
## 23239 79 74 2 1
## 23240 80 74 2 1
## 23241 81 74 2 1
## 23242 82 74 2 1
## 23243 83 74 2 1
## 23244 84 74 2 1
## 23245 85 74 2 1
## 23246 86 74 2 1
## 23247 87 74 2 1
## 23248 88 74 2 1
## 23249 89 74 2 1
## 23250 90 74 2 1
## 23251 91 74 2 1
## 23252 92 74 2 1
## 23253 93 74 2 1
## 23254 94 74 2 1
## 23255 95 74 2 1
## 23256 96 74 2 1
## 23257 97 74 2 1
## 23258 98 74 2 1
## 23259 99 74 2 1
## 23260 100 74 2 1
## 23261 101 74 2 1
## 23262 102 74 2 1
## 23263 103 74 2 1
## 23264 104 74 2 1
## 23265 105 74 2 1
## 23266 106 74 2 1
## 23267 107 74 2 1
## 23268 108 74 2 1
## 23269 109 74 2 1
## 23270 110 74 2 1
## 23271 111 74 2 1
## 23272 112 74 2 1
## 23273 113 74 2 1
## 23274 114 74 2 1
## 23275 115 74 2 1
## 23276 116 74 2 1
## 23277 117 74 2 1
## 23278 118 74 2 1
## 23279 119 74 2 1
## 23280 120 74 2 1
## 23281 1 75 2 1
## 23282 2 75 2 1
## 23283 3 75 2 1
## 23284 4 75 2 1
## 23285 5 75 2 1
## 23286 6 75 2 1
## 23287 7 75 2 1
## 23288 8 75 2 1
## 23289 9 75 2 1
## 23290 10 75 2 1
## 23291 11 75 2 1
## 23292 12 75 2 1
## 23293 13 75 2 1
## 23294 14 75 2 1
## 23295 15 75 2 1
## 23296 16 75 2 1
## 23297 17 75 2 1
## 23298 18 75 2 1
## 23299 19 75 2 1
## 23300 20 75 2 1
## 23301 21 75 2 1
## 23302 22 75 2 1
## 23303 23 75 2 1
## 23304 24 75 2 1
## 23305 25 75 2 1
## 23306 26 75 2 1
## 23307 27 75 2 1
## 23308 28 75 2 1
## 23309 29 75 2 1
## 23310 30 75 2 1
## 23311 31 75 2 1
## 23312 32 75 2 1
## 23313 33 75 2 1
## 23314 34 75 2 1
## 23315 35 75 2 1
## 23316 36 75 2 1
## 23317 37 75 2 1
## 23318 38 75 2 1
## 23319 39 75 2 1
## 23320 40 75 2 1
## 23321 41 75 2 1
## 23322 42 75 2 1
## 23323 43 75 2 1
## 23324 44 75 2 1
## 23325 45 75 2 1
## 23326 46 75 2 1
## 23327 47 75 2 1
## 23328 48 75 2 1
## 23329 49 75 2 1
## 23330 50 75 2 1
## 23331 51 75 2 1
## 23332 52 75 2 1
## 23333 53 75 2 1
## 23334 54 75 2 1
## 23335 55 75 2 1
## 23336 56 75 2 1
## 23337 57 75 2 1
## 23338 58 75 2 1
## 23339 59 75 2 1
## 23340 60 75 2 1
## 23341 61 75 2 1
## 23342 62 75 2 1
## 23343 63 75 2 1
## 23344 64 75 2 1
## 23345 65 75 2 1
## 23346 66 75 2 1
## 23347 67 75 2 1
## 23348 68 75 2 1
## 23349 69 75 2 1
## 23350 70 75 2 1
## 23351 71 75 2 1
## 23352 72 75 2 1
## 23353 73 75 2 1
## 23354 74 75 2 1
## 23355 75 75 2 1
## 23356 76 75 2 1
## 23357 77 75 2 1
## 23358 78 75 2 1
## 23359 79 75 2 1
## 23360 80 75 2 1
## 23361 81 75 2 1
## 23362 82 75 2 1
## 23363 83 75 2 1
## 23364 84 75 2 1
## 23365 85 75 2 1
## 23366 86 75 2 1
## 23367 87 75 2 1
## 23368 88 75 2 1
## 23369 89 75 2 1
## 23370 90 75 2 1
## 23371 91 75 2 1
## 23372 92 75 2 1
## 23373 93 75 2 1
## 23374 94 75 2 1
## 23375 95 75 2 1
## 23376 96 75 2 1
## 23377 97 75 2 1
## 23378 98 75 2 1
## 23379 99 75 2 1
## 23380 100 75 2 1
## 23381 101 75 2 1
## 23382 102 75 2 1
## 23383 103 75 2 1
## 23384 104 75 2 1
## 23385 105 75 2 1
## 23386 106 75 2 1
## 23387 107 75 2 1
## 23388 108 75 2 1
## 23389 109 75 2 1
## 23390 110 75 2 1
## 23391 111 75 2 1
## 23392 112 75 2 1
## 23393 113 75 2 1
## 23394 114 75 2 1
## 23395 115 75 2 1
## 23396 116 75 2 1
## 23397 117 75 2 1
## 23398 118 75 2 1
## 23399 119 75 2 1
## 23400 120 75 2 1
## 23401 1 76 2 1
## 23402 2 76 2 1
## 23403 3 76 2 1
## 23404 4 76 2 1
## 23405 5 76 2 1
## 23406 6 76 2 1
## 23407 7 76 2 1
## 23408 8 76 2 1
## 23409 9 76 2 1
## 23410 10 76 2 1
## 23411 11 76 2 1
## 23412 12 76 2 1
## 23413 13 76 2 1
## 23414 14 76 2 1
## 23415 15 76 2 1
## 23416 16 76 2 1
## 23417 17 76 2 1
## 23418 18 76 2 1
## 23419 19 76 2 1
## 23420 20 76 2 1
## 23421 21 76 2 1
## 23422 22 76 2 1
## 23423 23 76 2 1
## 23424 24 76 2 1
## 23425 25 76 2 1
## 23426 26 76 2 1
## 23427 27 76 2 1
## 23428 28 76 2 1
## 23429 29 76 2 1
## 23430 30 76 2 1
## 23431 31 76 2 1
## 23432 32 76 2 1
## 23433 33 76 2 1
## 23434 34 76 2 1
## 23435 35 76 2 1
## 23436 36 76 2 1
## 23437 37 76 2 1
## 23438 38 76 2 1
## 23439 39 76 2 1
## 23440 40 76 2 1
## 23441 41 76 2 1
## 23442 42 76 2 1
## 23443 43 76 2 1
## 23444 44 76 2 1
## 23445 45 76 2 1
## 23446 46 76 2 1
## 23447 47 76 2 1
## 23448 48 76 2 1
## 23449 49 76 2 1
## 23450 50 76 2 1
## 23451 51 76 2 1
## 23452 52 76 2 1
## 23453 53 76 2 1
## 23454 54 76 2 1
## 23455 55 76 2 1
## 23456 56 76 2 1
## 23457 57 76 2 1
## 23458 58 76 2 1
## 23459 59 76 2 1
## 23460 60 76 2 1
## 23461 61 76 2 1
## 23462 62 76 2 1
## 23463 63 76 2 1
## 23464 64 76 2 1
## 23465 65 76 2 1
## 23466 66 76 2 1
## 23467 67 76 2 1
## 23468 68 76 2 1
## 23469 69 76 2 1
## 23470 70 76 2 1
## 23471 71 76 2 1
## 23472 72 76 2 1
## 23473 73 76 2 1
## 23474 74 76 2 1
## 23475 75 76 2 1
## 23476 76 76 2 1
## 23477 77 76 2 1
## 23478 78 76 2 1
## 23479 79 76 2 1
## 23480 80 76 2 1
## 23481 81 76 2 1
## 23482 82 76 2 1
## 23483 83 76 2 1
## 23484 84 76 2 1
## 23485 85 76 2 1
## 23486 86 76 2 1
## 23487 87 76 2 1
## 23488 88 76 2 1
## 23489 89 76 2 1
## 23490 90 76 2 1
## 23491 91 76 2 1
## 23492 92 76 2 1
## 23493 93 76 2 1
## 23494 94 76 2 1
## 23495 95 76 2 1
## 23496 96 76 2 1
## 23497 97 76 2 1
## 23498 98 76 2 1
## 23499 99 76 2 1
## 23500 100 76 2 1
## 23501 101 76 2 1
## 23502 102 76 2 1
## 23503 103 76 2 1
## 23504 104 76 2 1
## 23505 105 76 2 1
## 23506 106 76 2 1
## 23507 107 76 2 1
## 23508 108 76 2 1
## 23509 109 76 2 1
## 23510 110 76 2 1
## 23511 111 76 2 1
## 23512 112 76 2 1
## 23513 113 76 2 1
## 23514 114 76 2 1
## 23515 115 76 2 1
## 23516 116 76 2 1
## 23517 117 76 2 1
## 23518 118 76 2 1
## 23519 119 76 2 1
## 23520 120 76 2 1
## 23521 1 77 2 1
## 23522 2 77 2 1
## 23523 3 77 2 1
## 23524 4 77 2 1
## 23525 5 77 2 1
## 23526 6 77 2 1
## 23527 7 77 2 1
## 23528 8 77 2 1
## 23529 9 77 2 1
## 23530 10 77 2 1
## 23531 11 77 2 1
## 23532 12 77 2 1
## 23533 13 77 2 1
## 23534 14 77 2 1
## 23535 15 77 2 1
## 23536 16 77 2 1
## 23537 17 77 2 1
## 23538 18 77 2 1
## 23539 19 77 2 1
## 23540 20 77 2 1
## 23541 21 77 2 1
## 23542 22 77 2 1
## 23543 23 77 2 1
## 23544 24 77 2 1
## 23545 25 77 2 1
## 23546 26 77 2 1
## 23547 27 77 2 1
## 23548 28 77 2 1
## 23549 29 77 2 1
## 23550 30 77 2 1
## 23551 31 77 2 1
## 23552 32 77 2 1
## 23553 33 77 2 1
## 23554 34 77 2 1
## 23555 35 77 2 1
## 23556 36 77 2 1
## 23557 37 77 2 1
## 23558 38 77 2 1
## 23559 39 77 2 1
## 23560 40 77 2 1
## 23561 41 77 2 1
## 23562 42 77 2 1
## 23563 43 77 2 1
## 23564 44 77 2 1
## 23565 45 77 2 1
## 23566 46 77 2 1
## 23567 47 77 2 1
## 23568 48 77 2 1
## 23569 49 77 2 1
## 23570 50 77 2 1
## 23571 51 77 2 1
## 23572 52 77 2 1
## 23573 53 77 2 1
## 23574 54 77 2 1
## 23575 55 77 2 1
## 23576 56 77 2 1
## 23577 57 77 2 1
## 23578 58 77 2 1
## 23579 59 77 2 1
## 23580 60 77 2 1
## 23581 61 77 2 1
## 23582 62 77 2 1
## 23583 63 77 2 1
## 23584 64 77 2 1
## 23585 65 77 2 1
## 23586 66 77 2 1
## 23587 67 77 2 1
## 23588 68 77 2 1
## 23589 69 77 2 1
## 23590 70 77 2 1
## 23591 71 77 2 1
## 23592 72 77 2 1
## 23593 73 77 2 1
## 23594 74 77 2 1
## 23595 75 77 2 1
## 23596 76 77 2 1
## 23597 77 77 2 1
## 23598 78 77 2 1
## 23599 79 77 2 1
## 23600 80 77 2 1
## 23601 81 77 2 1
## 23602 82 77 2 1
## 23603 83 77 2 1
## 23604 84 77 2 1
## 23605 85 77 2 1
## 23606 86 77 2 1
## 23607 87 77 2 1
## 23608 88 77 2 1
## 23609 89 77 2 1
## 23610 90 77 2 1
## 23611 91 77 2 1
## 23612 92 77 2 1
## 23613 93 77 2 1
## 23614 94 77 2 1
## 23615 95 77 2 1
## 23616 96 77 2 1
## 23617 97 77 2 1
## 23618 98 77 2 1
## 23619 99 77 2 1
## 23620 100 77 2 1
## 23621 101 77 2 1
## 23622 102 77 2 1
## 23623 103 77 2 1
## 23624 104 77 2 1
## 23625 105 77 2 1
## 23626 106 77 2 1
## 23627 107 77 2 1
## 23628 108 77 2 1
## 23629 109 77 2 1
## 23630 110 77 2 1
## 23631 111 77 2 1
## 23632 112 77 2 1
## 23633 113 77 2 1
## 23634 114 77 2 1
## 23635 115 77 2 1
## 23636 116 77 2 1
## 23637 117 77 2 1
## 23638 118 77 2 1
## 23639 119 77 2 1
## 23640 120 77 2 1
## 23641 1 78 2 1
## 23642 2 78 2 1
## 23643 3 78 2 1
## 23644 4 78 2 1
## 23645 5 78 2 1
## 23646 6 78 2 1
## 23647 7 78 2 1
## 23648 8 78 2 1
## 23649 9 78 2 1
## 23650 10 78 2 1
## 23651 11 78 2 1
## 23652 12 78 2 1
## 23653 13 78 2 1
## 23654 14 78 2 1
## 23655 15 78 2 1
## 23656 16 78 2 1
## 23657 17 78 2 1
## 23658 18 78 2 1
## 23659 19 78 2 1
## 23660 20 78 2 1
## 23661 21 78 2 1
## 23662 22 78 2 1
## 23663 23 78 2 1
## 23664 24 78 2 1
## 23665 25 78 2 1
## 23666 26 78 2 1
## 23667 27 78 2 1
## 23668 28 78 2 1
## 23669 29 78 2 1
## 23670 30 78 2 1
## 23671 31 78 2 1
## 23672 32 78 2 1
## 23673 33 78 2 1
## 23674 34 78 2 1
## 23675 35 78 2 1
## 23676 36 78 2 1
## 23677 37 78 2 1
## 23678 38 78 2 1
## 23679 39 78 2 1
## 23680 40 78 2 1
## 23681 41 78 2 1
## 23682 42 78 2 1
## 23683 43 78 2 1
## 23684 44 78 2 1
## 23685 45 78 2 1
## 23686 46 78 2 1
## 23687 47 78 2 1
## 23688 48 78 2 1
## 23689 49 78 2 1
## 23690 50 78 2 1
## 23691 51 78 2 1
## 23692 52 78 2 1
## 23693 53 78 2 1
## 23694 54 78 2 1
## 23695 55 78 2 1
## 23696 56 78 2 1
## 23697 57 78 2 1
## 23698 58 78 2 1
## 23699 59 78 2 1
## 23700 60 78 2 1
## 23701 61 78 2 1
## 23702 62 78 2 1
## 23703 63 78 2 1
## 23704 64 78 2 1
## 23705 65 78 2 1
## 23706 66 78 2 1
## 23707 67 78 2 1
## 23708 68 78 2 1
## 23709 69 78 2 1
## 23710 70 78 2 1
## 23711 71 78 2 1
## 23712 72 78 2 1
## 23713 73 78 2 1
## 23714 74 78 2 1
## 23715 75 78 2 1
## 23716 76 78 2 1
## 23717 77 78 2 1
## 23718 78 78 2 1
## 23719 79 78 2 1
## 23720 80 78 2 1
## 23721 81 78 2 1
## 23722 82 78 2 1
## 23723 83 78 2 1
## 23724 84 78 2 1
## 23725 85 78 2 1
## 23726 86 78 2 1
## 23727 87 78 2 1
## 23728 88 78 2 1
## 23729 89 78 2 1
## 23730 90 78 2 1
## 23731 91 78 2 1
## 23732 92 78 2 1
## 23733 93 78 2 1
## 23734 94 78 2 1
## 23735 95 78 2 1
## 23736 96 78 2 1
## 23737 97 78 2 1
## 23738 98 78 2 1
## 23739 99 78 2 1
## 23740 100 78 2 1
## 23741 101 78 2 1
## 23742 102 78 2 1
## 23743 103 78 2 1
## 23744 104 78 2 1
## 23745 105 78 2 1
## 23746 106 78 2 1
## 23747 107 78 2 1
## 23748 108 78 2 1
## 23749 109 78 2 1
## 23750 110 78 2 1
## 23751 111 78 2 1
## 23752 112 78 2 1
## 23753 113 78 2 1
## 23754 114 78 2 1
## 23755 115 78 2 1
## 23756 116 78 2 1
## 23757 117 78 2 1
## 23758 118 78 2 1
## 23759 119 78 2 1
## 23760 120 78 2 1
## 23761 1 79 2 1
## 23762 2 79 2 1
## 23763 3 79 2 1
## 23764 4 79 2 1
## 23765 5 79 2 1
## 23766 6 79 2 1
## 23767 7 79 2 1
## 23768 8 79 2 1
## 23769 9 79 2 1
## 23770 10 79 2 1
## 23771 11 79 2 1
## 23772 12 79 2 1
## 23773 13 79 2 1
## 23774 14 79 2 1
## 23775 15 79 2 1
## 23776 16 79 2 1
## 23777 17 79 2 1
## 23778 18 79 2 1
## 23779 19 79 2 1
## 23780 20 79 2 1
## 23781 21 79 2 1
## 23782 22 79 2 1
## 23783 23 79 2 1
## 23784 24 79 2 1
## 23785 25 79 2 1
## 23786 26 79 2 1
## 23787 27 79 2 1
## 23788 28 79 2 1
## 23789 29 79 2 1
## 23790 30 79 2 1
## 23791 31 79 2 1
## 23792 32 79 2 1
## 23793 33 79 2 1
## 23794 34 79 2 1
## 23795 35 79 2 1
## 23796 36 79 2 1
## 23797 37 79 2 1
## 23798 38 79 2 1
## 23799 39 79 2 1
## 23800 40 79 2 1
## 23801 41 79 2 1
## 23802 42 79 2 1
## 23803 43 79 2 1
## 23804 44 79 2 1
## 23805 45 79 2 1
## 23806 46 79 2 1
## 23807 47 79 2 1
## 23808 48 79 2 1
## 23809 49 79 2 1
## 23810 50 79 2 1
## 23811 51 79 2 1
## 23812 52 79 2 1
## 23813 53 79 2 1
## 23814 54 79 2 1
## 23815 55 79 2 1
## 23816 56 79 2 1
## 23817 57 79 2 1
## 23818 58 79 2 1
## 23819 59 79 2 1
## 23820 60 79 2 1
## 23821 61 79 2 1
## 23822 62 79 2 1
## 23823 63 79 2 1
## 23824 64 79 2 1
## 23825 65 79 2 1
## 23826 66 79 2 1
## 23827 67 79 2 1
## 23828 68 79 2 1
## 23829 69 79 2 1
## 23830 70 79 2 1
## 23831 71 79 2 1
## 23832 72 79 2 1
## 23833 73 79 2 1
## 23834 74 79 2 1
## 23835 75 79 2 1
## 23836 76 79 2 1
## 23837 77 79 2 1
## 23838 78 79 2 1
## 23839 79 79 2 1
## 23840 80 79 2 1
## 23841 81 79 2 1
## 23842 82 79 2 1
## 23843 83 79 2 1
## 23844 84 79 2 1
## 23845 85 79 2 1
## 23846 86 79 2 1
## 23847 87 79 2 1
## 23848 88 79 2 1
## 23849 89 79 2 1
## 23850 90 79 2 1
## 23851 91 79 2 1
## 23852 92 79 2 1
## 23853 93 79 2 1
## 23854 94 79 2 1
## 23855 95 79 2 1
## 23856 96 79 2 1
## 23857 97 79 2 1
## 23858 98 79 2 1
## 23859 99 79 2 1
## 23860 100 79 2 1
## 23861 101 79 2 1
## 23862 102 79 2 1
## 23863 103 79 2 1
## 23864 104 79 2 1
## 23865 105 79 2 1
## 23866 106 79 2 1
## 23867 107 79 2 1
## 23868 108 79 2 1
## 23869 109 79 2 1
## 23870 110 79 2 1
## 23871 111 79 2 1
## 23872 112 79 2 1
## 23873 113 79 2 1
## 23874 114 79 2 1
## 23875 115 79 2 1
## 23876 116 79 2 1
## 23877 117 79 2 1
## 23878 118 79 2 1
## 23879 119 79 2 1
## 23880 120 79 2 1
## 23881 1 80 2 1
## 23882 2 80 2 1
## 23883 3 80 2 1
## 23884 4 80 2 1
## 23885 5 80 2 1
## 23886 6 80 2 1
## 23887 7 80 2 1
## 23888 8 80 2 1
## 23889 9 80 2 1
## 23890 10 80 2 1
## 23891 11 80 2 1
## 23892 12 80 2 1
## 23893 13 80 2 1
## 23894 14 80 2 1
## 23895 15 80 2 1
## 23896 16 80 2 1
## 23897 17 80 2 1
## 23898 18 80 2 1
## 23899 19 80 2 1
## 23900 20 80 2 1
## 23901 21 80 2 1
## 23902 22 80 2 1
## 23903 23 80 2 1
## 23904 24 80 2 1
## 23905 25 80 2 1
## 23906 26 80 2 1
## 23907 27 80 2 1
## 23908 28 80 2 1
## 23909 29 80 2 1
## 23910 30 80 2 1
## 23911 31 80 2 1
## 23912 32 80 2 1
## 23913 33 80 2 1
## 23914 34 80 2 1
## 23915 35 80 2 1
## 23916 36 80 2 1
## 23917 37 80 2 1
## 23918 38 80 2 1
## 23919 39 80 2 1
## 23920 40 80 2 1
## 23921 41 80 2 1
## 23922 42 80 2 1
## 23923 43 80 2 1
## 23924 44 80 2 1
## 23925 45 80 2 1
## 23926 46 80 2 1
## 23927 47 80 2 1
## 23928 48 80 2 1
## 23929 49 80 2 1
## 23930 50 80 2 1
## 23931 51 80 2 1
## 23932 52 80 2 1
## 23933 53 80 2 1
## 23934 54 80 2 1
## 23935 55 80 2 1
## 23936 56 80 2 1
## 23937 57 80 2 1
## 23938 58 80 2 1
## 23939 59 80 2 1
## 23940 60 80 2 1
## 23941 61 80 2 1
## 23942 62 80 2 1
## 23943 63 80 2 1
## 23944 64 80 2 1
## 23945 65 80 2 1
## 23946 66 80 2 1
## 23947 67 80 2 1
## 23948 68 80 2 1
## 23949 69 80 2 1
## 23950 70 80 2 1
## 23951 71 80 2 1
## 23952 72 80 2 1
## 23953 73 80 2 1
## 23954 74 80 2 1
## 23955 75 80 2 1
## 23956 76 80 2 1
## 23957 77 80 2 1
## 23958 78 80 2 1
## 23959 79 80 2 1
## 23960 80 80 2 1
## 23961 81 80 2 1
## 23962 82 80 2 1
## 23963 83 80 2 1
## 23964 84 80 2 1
## 23965 85 80 2 1
## 23966 86 80 2 1
## 23967 87 80 2 1
## 23968 88 80 2 1
## 23969 89 80 2 1
## 23970 90 80 2 1
## 23971 91 80 2 1
## 23972 92 80 2 1
## 23973 93 80 2 1
## 23974 94 80 2 1
## 23975 95 80 2 1
## 23976 96 80 2 1
## 23977 97 80 2 1
## 23978 98 80 2 1
## 23979 99 80 2 1
## 23980 100 80 2 1
## 23981 101 80 2 1
## 23982 102 80 2 1
## 23983 103 80 2 1
## 23984 104 80 2 1
## 23985 105 80 2 1
## 23986 106 80 2 1
## 23987 107 80 2 1
## 23988 108 80 2 1
## 23989 109 80 2 1
## 23990 110 80 2 1
## 23991 111 80 2 1
## 23992 112 80 2 1
## 23993 113 80 2 1
## 23994 114 80 2 1
## 23995 115 80 2 1
## 23996 116 80 2 1
## 23997 117 80 2 1
## 23998 118 80 2 1
## 23999 119 80 2 1
## 24000 120 80 2 1
## 24001 1 81 2 1
## 24002 2 81 2 1
## 24003 3 81 2 1
## 24004 4 81 2 1
## 24005 5 81 2 1
## 24006 6 81 2 1
## 24007 7 81 2 1
## 24008 8 81 2 1
## 24009 9 81 2 1
## 24010 10 81 2 1
## 24011 11 81 2 1
## 24012 12 81 2 1
## 24013 13 81 2 1
## 24014 14 81 2 1
## 24015 15 81 2 1
## 24016 16 81 2 1
## 24017 17 81 2 1
## 24018 18 81 2 1
## 24019 19 81 2 1
## 24020 20 81 2 1
## 24021 21 81 2 1
## 24022 22 81 2 1
## 24023 23 81 2 1
## 24024 24 81 2 1
## 24025 25 81 2 1
## 24026 26 81 2 1
## 24027 27 81 2 1
## 24028 28 81 2 1
## 24029 29 81 2 1
## 24030 30 81 2 1
## 24031 31 81 2 1
## 24032 32 81 2 1
## 24033 33 81 2 1
## 24034 34 81 2 1
## 24035 35 81 2 1
## 24036 36 81 2 1
## 24037 37 81 2 1
## 24038 38 81 2 1
## 24039 39 81 2 1
## 24040 40 81 2 1
## 24041 41 81 2 1
## 24042 42 81 2 1
## 24043 43 81 2 1
## 24044 44 81 2 1
## 24045 45 81 2 1
## 24046 46 81 2 1
## 24047 47 81 2 1
## 24048 48 81 2 1
## 24049 49 81 2 1
## 24050 50 81 2 1
## 24051 51 81 2 1
## 24052 52 81 2 1
## 24053 53 81 2 1
## 24054 54 81 2 1
## 24055 55 81 2 1
## 24056 56 81 2 1
## 24057 57 81 2 1
## 24058 58 81 2 1
## 24059 59 81 2 1
## 24060 60 81 2 1
## 24061 61 81 2 1
## 24062 62 81 2 1
## 24063 63 81 2 1
## 24064 64 81 2 1
## 24065 65 81 2 1
## 24066 66 81 2 1
## 24067 67 81 2 1
## 24068 68 81 2 1
## 24069 69 81 2 1
## 24070 70 81 2 1
## 24071 71 81 2 1
## 24072 72 81 2 1
## 24073 73 81 2 1
## 24074 74 81 2 1
## 24075 75 81 2 1
## 24076 76 81 2 1
## 24077 77 81 2 1
## 24078 78 81 2 1
## 24079 79 81 2 1
## 24080 80 81 2 1
## 24081 81 81 2 1
## 24082 82 81 2 1
## 24083 83 81 2 1
## 24084 84 81 2 1
## 24085 85 81 2 1
## 24086 86 81 2 1
## 24087 87 81 2 1
## 24088 88 81 2 1
## 24089 89 81 2 1
## 24090 90 81 2 1
## 24091 91 81 2 1
## 24092 92 81 2 1
## 24093 93 81 2 1
## 24094 94 81 2 1
## 24095 95 81 2 1
## 24096 96 81 2 1
## 24097 97 81 2 1
## 24098 98 81 2 1
## 24099 99 81 2 1
## 24100 100 81 2 1
## 24101 101 81 2 1
## 24102 102 81 2 1
## 24103 103 81 2 1
## 24104 104 81 2 1
## 24105 105 81 2 1
## 24106 106 81 2 1
## 24107 107 81 2 1
## 24108 108 81 2 1
## 24109 109 81 2 1
## 24110 110 81 2 1
## 24111 111 81 2 1
## 24112 112 81 2 1
## 24113 113 81 2 1
## 24114 114 81 2 1
## 24115 115 81 2 1
## 24116 116 81 2 1
## 24117 117 81 2 1
## 24118 118 81 2 1
## 24119 119 81 2 1
## 24120 120 81 2 1
## 24121 1 82 2 1
## 24122 2 82 2 1
## 24123 3 82 2 1
## 24124 4 82 2 1
## 24125 5 82 2 1
## 24126 6 82 2 1
## 24127 7 82 2 1
## 24128 8 82 2 1
## 24129 9 82 2 1
## 24130 10 82 2 1
## 24131 11 82 2 1
## 24132 12 82 2 1
## 24133 13 82 2 1
## 24134 14 82 2 1
## 24135 15 82 2 1
## 24136 16 82 2 1
## 24137 17 82 2 1
## 24138 18 82 2 1
## 24139 19 82 2 1
## 24140 20 82 2 1
## 24141 21 82 2 1
## 24142 22 82 2 1
## 24143 23 82 2 1
## 24144 24 82 2 1
## 24145 25 82 2 1
## 24146 26 82 2 1
## 24147 27 82 2 1
## 24148 28 82 2 1
## 24149 29 82 2 1
## 24150 30 82 2 1
## 24151 31 82 2 1
## 24152 32 82 2 1
## 24153 33 82 2 1
## 24154 34 82 2 1
## 24155 35 82 2 1
## 24156 36 82 2 1
## 24157 37 82 2 1
## 24158 38 82 2 1
## 24159 39 82 2 1
## 24160 40 82 2 1
## 24161 41 82 2 1
## 24162 42 82 2 1
## 24163 43 82 2 1
## 24164 44 82 2 1
## 24165 45 82 2 1
## 24166 46 82 2 1
## 24167 47 82 2 1
## 24168 48 82 2 1
## 24169 49 82 2 1
## 24170 50 82 2 1
## 24171 51 82 2 1
## 24172 52 82 2 1
## 24173 53 82 2 1
## 24174 54 82 2 1
## 24175 55 82 2 1
## 24176 56 82 2 1
## 24177 57 82 2 1
## 24178 58 82 2 1
## 24179 59 82 2 1
## 24180 60 82 2 1
## 24181 61 82 2 1
## 24182 62 82 2 1
## 24183 63 82 2 1
## 24184 64 82 2 1
## 24185 65 82 2 1
## 24186 66 82 2 1
## 24187 67 82 2 1
## 24188 68 82 2 1
## 24189 69 82 2 1
## 24190 70 82 2 1
## 24191 71 82 2 1
## 24192 72 82 2 1
## 24193 73 82 2 1
## 24194 74 82 2 1
## 24195 75 82 2 1
## 24196 76 82 2 1
## 24197 77 82 2 1
## 24198 78 82 2 1
## 24199 79 82 2 1
## 24200 80 82 2 1
## 24201 81 82 2 1
## 24202 82 82 2 1
## 24203 83 82 2 1
## 24204 84 82 2 1
## 24205 85 82 2 1
## 24206 86 82 2 1
## 24207 87 82 2 1
## 24208 88 82 2 1
## 24209 89 82 2 1
## 24210 90 82 2 1
## 24211 91 82 2 1
## 24212 92 82 2 1
## 24213 93 82 2 1
## 24214 94 82 2 1
## 24215 95 82 2 1
## 24216 96 82 2 1
## 24217 97 82 2 1
## 24218 98 82 2 1
## 24219 99 82 2 1
## 24220 100 82 2 1
## 24221 101 82 2 1
## 24222 102 82 2 1
## 24223 103 82 2 1
## 24224 104 82 2 1
## 24225 105 82 2 1
## 24226 106 82 2 1
## 24227 107 82 2 1
## 24228 108 82 2 1
## 24229 109 82 2 1
## 24230 110 82 2 1
## 24231 111 82 2 1
## 24232 112 82 2 1
## 24233 113 82 2 1
## 24234 114 82 2 1
## 24235 115 82 2 1
## 24236 116 82 2 1
## 24237 117 82 2 1
## 24238 118 82 2 1
## 24239 119 82 2 1
## 24240 120 82 2 1
## 24241 1 83 2 1
## 24242 2 83 2 1
## 24243 3 83 2 1
## 24244 4 83 2 1
## 24245 5 83 2 1
## 24246 6 83 2 1
## 24247 7 83 2 1
## 24248 8 83 2 1
## 24249 9 83 2 1
## 24250 10 83 2 1
## 24251 11 83 2 1
## 24252 12 83 2 1
## 24253 13 83 2 1
## 24254 14 83 2 1
## 24255 15 83 2 1
## 24256 16 83 2 1
## 24257 17 83 2 1
## 24258 18 83 2 1
## 24259 19 83 2 1
## 24260 20 83 2 1
## 24261 21 83 2 1
## 24262 22 83 2 1
## 24263 23 83 2 1
## 24264 24 83 2 1
## 24265 25 83 2 1
## 24266 26 83 2 1
## 24267 27 83 2 1
## 24268 28 83 2 1
## 24269 29 83 2 1
## 24270 30 83 2 1
## 24271 31 83 2 1
## 24272 32 83 2 1
## 24273 33 83 2 1
## 24274 34 83 2 1
## 24275 35 83 2 1
## 24276 36 83 2 1
## 24277 37 83 2 1
## 24278 38 83 2 1
## 24279 39 83 2 1
## 24280 40 83 2 1
## 24281 41 83 2 1
## 24282 42 83 2 1
## 24283 43 83 2 1
## 24284 44 83 2 1
## 24285 45 83 2 1
## 24286 46 83 2 1
## 24287 47 83 2 1
## 24288 48 83 2 1
## 24289 49 83 2 1
## 24290 50 83 2 1
## 24291 51 83 2 1
## 24292 52 83 2 1
## 24293 53 83 2 1
## 24294 54 83 2 1
## 24295 55 83 2 1
## 24296 56 83 2 1
## 24297 57 83 2 1
## 24298 58 83 2 1
## 24299 59 83 2 1
## 24300 60 83 2 1
## 24301 61 83 2 1
## 24302 62 83 2 1
## 24303 63 83 2 1
## 24304 64 83 2 1
## 24305 65 83 2 1
## 24306 66 83 2 1
## 24307 67 83 2 1
## 24308 68 83 2 1
## 24309 69 83 2 1
## 24310 70 83 2 1
## 24311 71 83 2 1
## 24312 72 83 2 1
## 24313 73 83 2 1
## 24314 74 83 2 1
## 24315 75 83 2 1
## 24316 76 83 2 1
## 24317 77 83 2 1
## 24318 78 83 2 1
## 24319 79 83 2 1
## 24320 80 83 2 1
## 24321 81 83 2 1
## 24322 82 83 2 1
## 24323 83 83 2 1
## 24324 84 83 2 1
## 24325 85 83 2 1
## 24326 86 83 2 1
## 24327 87 83 2 1
## 24328 88 83 2 1
## 24329 89 83 2 1
## 24330 90 83 2 1
## 24331 91 83 2 1
## 24332 92 83 2 1
## 24333 93 83 2 1
## 24334 94 83 2 1
## 24335 95 83 2 1
## 24336 96 83 2 1
## 24337 97 83 2 1
## 24338 98 83 2 1
## 24339 99 83 2 1
## 24340 100 83 2 1
## 24341 101 83 2 1
## 24342 102 83 2 1
## 24343 103 83 2 1
## 24344 104 83 2 1
## 24345 105 83 2 1
## 24346 106 83 2 1
## 24347 107 83 2 1
## 24348 108 83 2 1
## 24349 109 83 2 1
## 24350 110 83 2 1
## 24351 111 83 2 1
## 24352 112 83 2 1
## 24353 113 83 2 1
## 24354 114 83 2 1
## 24355 115 83 2 1
## 24356 116 83 2 1
## 24357 117 83 2 1
## 24358 118 83 2 1
## 24359 119 83 2 1
## 24360 120 83 2 1
## 24361 1 84 2 1
## 24362 2 84 2 1
## 24363 3 84 2 1
## 24364 4 84 2 1
## 24365 5 84 2 1
## 24366 6 84 2 1
## 24367 7 84 2 1
## 24368 8 84 2 1
## 24369 9 84 2 1
## 24370 10 84 2 1
## 24371 11 84 2 1
## 24372 12 84 2 1
## 24373 13 84 2 1
## 24374 14 84 2 1
## 24375 15 84 2 1
## 24376 16 84 2 1
## 24377 17 84 2 1
## 24378 18 84 2 1
## 24379 19 84 2 1
## 24380 20 84 2 1
## 24381 21 84 2 1
## 24382 22 84 2 1
## 24383 23 84 2 1
## 24384 24 84 2 1
## 24385 25 84 2 1
## 24386 26 84 2 1
## 24387 27 84 2 1
## 24388 28 84 2 1
## 24389 29 84 2 1
## 24390 30 84 2 1
## 24391 31 84 2 1
## 24392 32 84 2 1
## 24393 33 84 2 1
## 24394 34 84 2 1
## 24395 35 84 2 1
## 24396 36 84 2 1
## 24397 37 84 2 1
## 24398 38 84 2 1
## 24399 39 84 2 1
## 24400 40 84 2 1
## 24401 41 84 2 1
## 24402 42 84 2 1
## 24403 43 84 2 1
## 24404 44 84 2 1
## 24405 45 84 2 1
## 24406 46 84 2 1
## 24407 47 84 2 1
## 24408 48 84 2 1
## 24409 49 84 2 1
## 24410 50 84 2 1
## 24411 51 84 2 1
## 24412 52 84 2 1
## 24413 53 84 2 1
## 24414 54 84 2 1
## 24415 55 84 2 1
## 24416 56 84 2 1
## 24417 57 84 2 1
## 24418 58 84 2 1
## 24419 59 84 2 1
## 24420 60 84 2 1
## 24421 61 84 2 1
## 24422 62 84 2 1
## 24423 63 84 2 1
## 24424 64 84 2 1
## 24425 65 84 2 1
## 24426 66 84 2 1
## 24427 67 84 2 1
## 24428 68 84 2 1
## 24429 69 84 2 1
## 24430 70 84 2 1
## 24431 71 84 2 1
## 24432 72 84 2 1
## 24433 73 84 2 1
## 24434 74 84 2 1
## 24435 75 84 2 1
## 24436 76 84 2 1
## 24437 77 84 2 1
## 24438 78 84 2 1
## 24439 79 84 2 1
## 24440 80 84 2 1
## 24441 81 84 2 1
## 24442 82 84 2 1
## 24443 83 84 2 1
## 24444 84 84 2 1
## 24445 85 84 2 1
## 24446 86 84 2 1
## 24447 87 84 2 1
## 24448 88 84 2 1
## 24449 89 84 2 1
## 24450 90 84 2 1
## 24451 91 84 2 1
## 24452 92 84 2 1
## 24453 93 84 2 1
## 24454 94 84 2 1
## 24455 95 84 2 1
## 24456 96 84 2 1
## 24457 97 84 2 1
## 24458 98 84 2 1
## 24459 99 84 2 1
## 24460 100 84 2 1
## 24461 101 84 2 1
## 24462 102 84 2 1
## 24463 103 84 2 1
## 24464 104 84 2 1
## 24465 105 84 2 1
## 24466 106 84 2 1
## 24467 107 84 2 1
## 24468 108 84 2 1
## 24469 109 84 2 1
## 24470 110 84 2 1
## 24471 111 84 2 1
## 24472 112 84 2 1
## 24473 113 84 2 1
## 24474 114 84 2 1
## 24475 115 84 2 1
## 24476 116 84 2 1
## 24477 117 84 2 1
## 24478 118 84 2 1
## 24479 119 84 2 1
## 24480 120 84 2 1
## 24481 1 85 2 1
## 24482 2 85 2 1
## 24483 3 85 2 1
## 24484 4 85 2 1
## 24485 5 85 2 1
## 24486 6 85 2 1
## 24487 7 85 2 1
## 24488 8 85 2 1
## 24489 9 85 2 1
## 24490 10 85 2 1
## 24491 11 85 2 1
## 24492 12 85 2 1
## 24493 13 85 2 1
## 24494 14 85 2 1
## 24495 15 85 2 1
## 24496 16 85 2 1
## 24497 17 85 2 1
## 24498 18 85 2 1
## 24499 19 85 2 1
## 24500 20 85 2 1
## 24501 21 85 2 1
## 24502 22 85 2 1
## 24503 23 85 2 1
## 24504 24 85 2 1
## 24505 25 85 2 1
## 24506 26 85 2 1
## 24507 27 85 2 1
## 24508 28 85 2 1
## 24509 29 85 2 1
## 24510 30 85 2 1
## 24511 31 85 2 1
## 24512 32 85 2 1
## 24513 33 85 2 1
## 24514 34 85 2 1
## 24515 35 85 2 1
## 24516 36 85 2 1
## 24517 37 85 2 1
## 24518 38 85 2 1
## 24519 39 85 2 1
## 24520 40 85 2 1
## 24521 41 85 2 1
## 24522 42 85 2 1
## 24523 43 85 2 1
## 24524 44 85 2 1
## 24525 45 85 2 1
## 24526 46 85 2 1
## 24527 47 85 2 1
## 24528 48 85 2 1
## 24529 49 85 2 1
## 24530 50 85 2 1
## 24531 51 85 2 1
## 24532 52 85 2 1
## 24533 53 85 2 1
## 24534 54 85 2 1
## 24535 55 85 2 1
## 24536 56 85 2 1
## 24537 57 85 2 1
## 24538 58 85 2 1
## 24539 59 85 2 1
## 24540 60 85 2 1
## 24541 61 85 2 1
## 24542 62 85 2 1
## 24543 63 85 2 1
## 24544 64 85 2 1
## 24545 65 85 2 1
## 24546 66 85 2 1
## 24547 67 85 2 1
## 24548 68 85 2 1
## 24549 69 85 2 1
## 24550 70 85 2 1
## 24551 71 85 2 1
## 24552 72 85 2 1
## 24553 73 85 2 1
## 24554 74 85 2 1
## 24555 75 85 2 1
## 24556 76 85 2 1
## 24557 77 85 2 1
## 24558 78 85 2 1
## 24559 79 85 2 1
## 24560 80 85 2 1
## 24561 81 85 2 1
## 24562 82 85 2 1
## 24563 83 85 2 1
## 24564 84 85 2 1
## 24565 85 85 2 1
## 24566 86 85 2 1
## 24567 87 85 2 1
## 24568 88 85 2 1
## 24569 89 85 2 1
## 24570 90 85 2 1
## 24571 91 85 2 1
## 24572 92 85 2 1
## 24573 93 85 2 1
## 24574 94 85 2 1
## 24575 95 85 2 1
## 24576 96 85 2 1
## 24577 97 85 2 1
## 24578 98 85 2 1
## 24579 99 85 2 1
## 24580 100 85 2 1
## 24581 101 85 2 1
## 24582 102 85 2 1
## 24583 103 85 2 1
## 24584 104 85 2 1
## 24585 105 85 2 1
## 24586 106 85 2 1
## 24587 107 85 2 1
## 24588 108 85 2 1
## 24589 109 85 2 1
## 24590 110 85 2 1
## 24591 111 85 2 1
## 24592 112 85 2 1
## 24593 113 85 2 1
## 24594 114 85 2 1
## 24595 115 85 2 1
## 24596 116 85 2 1
## 24597 117 85 2 1
## 24598 118 85 2 1
## 24599 119 85 2 1
## 24600 120 85 2 1
## 24601 1 86 2 1
## 24602 2 86 2 1
## 24603 3 86 2 1
## 24604 4 86 2 1
## 24605 5 86 2 1
## 24606 6 86 2 1
## 24607 7 86 2 1
## 24608 8 86 2 1
## 24609 9 86 2 1
## 24610 10 86 2 1
## 24611 11 86 2 1
## 24612 12 86 2 1
## 24613 13 86 2 1
## 24614 14 86 2 1
## 24615 15 86 2 1
## 24616 16 86 2 1
## 24617 17 86 2 1
## 24618 18 86 2 1
## 24619 19 86 2 1
## 24620 20 86 2 1
## 24621 21 86 2 1
## 24622 22 86 2 1
## 24623 23 86 2 1
## 24624 24 86 2 1
## 24625 25 86 2 1
## 24626 26 86 2 1
## 24627 27 86 2 1
## 24628 28 86 2 1
## 24629 29 86 2 1
## 24630 30 86 2 1
## 24631 31 86 2 1
## 24632 32 86 2 1
## 24633 33 86 2 1
## 24634 34 86 2 1
## 24635 35 86 2 1
## 24636 36 86 2 1
## 24637 37 86 2 1
## 24638 38 86 2 1
## 24639 39 86 2 1
## 24640 40 86 2 1
## 24641 41 86 2 1
## 24642 42 86 2 1
## 24643 43 86 2 1
## 24644 44 86 2 1
## 24645 45 86 2 1
## 24646 46 86 2 1
## 24647 47 86 2 1
## 24648 48 86 2 1
## 24649 49 86 2 1
## 24650 50 86 2 1
## 24651 51 86 2 1
## 24652 52 86 2 1
## 24653 53 86 2 1
## 24654 54 86 2 1
## 24655 55 86 2 1
## 24656 56 86 2 1
## 24657 57 86 2 1
## 24658 58 86 2 1
## 24659 59 86 2 1
## 24660 60 86 2 1
## 24661 61 86 2 1
## 24662 62 86 2 1
## 24663 63 86 2 1
## 24664 64 86 2 1
## 24665 65 86 2 1
## 24666 66 86 2 1
## 24667 67 86 2 1
## 24668 68 86 2 1
## 24669 69 86 2 1
## 24670 70 86 2 1
## 24671 71 86 2 1
## 24672 72 86 2 1
## 24673 73 86 2 1
## 24674 74 86 2 1
## 24675 75 86 2 1
## 24676 76 86 2 1
## 24677 77 86 2 1
## 24678 78 86 2 1
## 24679 79 86 2 1
## 24680 80 86 2 1
## 24681 81 86 2 1
## 24682 82 86 2 1
## 24683 83 86 2 1
## 24684 84 86 2 1
## 24685 85 86 2 1
## 24686 86 86 2 1
## 24687 87 86 2 1
## 24688 88 86 2 1
## 24689 89 86 2 1
## 24690 90 86 2 1
## 24691 91 86 2 1
## 24692 92 86 2 1
## 24693 93 86 2 1
## 24694 94 86 2 1
## 24695 95 86 2 1
## 24696 96 86 2 1
## 24697 97 86 2 1
## 24698 98 86 2 1
## 24699 99 86 2 1
## 24700 100 86 2 1
## 24701 101 86 2 1
## 24702 102 86 2 1
## 24703 103 86 2 1
## 24704 104 86 2 1
## 24705 105 86 2 1
## 24706 106 86 2 1
## 24707 107 86 2 1
## 24708 108 86 2 1
## 24709 109 86 2 1
## 24710 110 86 2 1
## 24711 111 86 2 1
## 24712 112 86 2 1
## 24713 113 86 2 1
## 24714 114 86 2 1
## 24715 115 86 2 1
## 24716 116 86 2 1
## 24717 117 86 2 1
## 24718 118 86 2 1
## 24719 119 86 2 1
## 24720 120 86 2 1
## 24721 1 87 2 1
## 24722 2 87 2 1
## 24723 3 87 2 1
## 24724 4 87 2 1
## 24725 5 87 2 1
## 24726 6 87 2 1
## 24727 7 87 2 1
## 24728 8 87 2 1
## 24729 9 87 2 1
## 24730 10 87 2 1
## 24731 11 87 2 1
## 24732 12 87 2 1
## 24733 13 87 2 1
## 24734 14 87 2 1
## 24735 15 87 2 1
## 24736 16 87 2 1
## 24737 17 87 2 1
## 24738 18 87 2 1
## 24739 19 87 2 1
## 24740 20 87 2 1
## 24741 21 87 2 1
## 24742 22 87 2 1
## 24743 23 87 2 1
## 24744 24 87 2 1
## 24745 25 87 2 1
## 24746 26 87 2 1
## 24747 27 87 2 1
## 24748 28 87 2 1
## 24749 29 87 2 1
## 24750 30 87 2 1
## 24751 31 87 2 1
## 24752 32 87 2 1
## 24753 33 87 2 1
## 24754 34 87 2 1
## 24755 35 87 2 1
## 24756 36 87 2 1
## 24757 37 87 2 1
## 24758 38 87 2 1
## 24759 39 87 2 1
## 24760 40 87 2 1
## 24761 41 87 2 1
## 24762 42 87 2 1
## 24763 43 87 2 1
## 24764 44 87 2 1
## 24765 45 87 2 1
## 24766 46 87 2 1
## 24767 47 87 2 1
## 24768 48 87 2 1
## 24769 49 87 2 1
## 24770 50 87 2 1
## 24771 51 87 2 1
## 24772 52 87 2 1
## 24773 53 87 2 1
## 24774 54 87 2 1
## 24775 55 87 2 1
## 24776 56 87 2 1
## 24777 57 87 2 1
## 24778 58 87 2 1
## 24779 59 87 2 1
## 24780 60 87 2 1
## 24781 61 87 2 1
## 24782 62 87 2 1
## 24783 63 87 2 1
## 24784 64 87 2 1
## 24785 65 87 2 1
## 24786 66 87 2 1
## 24787 67 87 2 1
## 24788 68 87 2 1
## 24789 69 87 2 1
## 24790 70 87 2 1
## 24791 71 87 2 1
## 24792 72 87 2 1
## 24793 73 87 2 1
## 24794 74 87 2 1
## 24795 75 87 2 1
## 24796 76 87 2 1
## 24797 77 87 2 1
## 24798 78 87 2 1
## 24799 79 87 2 1
## 24800 80 87 2 1
## 24801 81 87 2 1
## 24802 82 87 2 1
## 24803 83 87 2 1
## 24804 84 87 2 1
## 24805 85 87 2 1
## 24806 86 87 2 1
## 24807 87 87 2 1
## 24808 88 87 2 1
## 24809 89 87 2 1
## 24810 90 87 2 1
## 24811 91 87 2 1
## 24812 92 87 2 1
## 24813 93 87 2 1
## 24814 94 87 2 1
## 24815 95 87 2 1
## 24816 96 87 2 1
## 24817 97 87 2 1
## 24818 98 87 2 1
## 24819 99 87 2 1
## 24820 100 87 2 1
## 24821 101 87 2 1
## 24822 102 87 2 1
## 24823 103 87 2 1
## 24824 104 87 2 1
## 24825 105 87 2 1
## 24826 106 87 2 1
## 24827 107 87 2 1
## 24828 108 87 2 1
## 24829 109 87 2 1
## 24830 110 87 2 1
## 24831 111 87 2 1
## 24832 112 87 2 1
## 24833 113 87 2 1
## 24834 114 87 2 1
## 24835 115 87 2 1
## 24836 116 87 2 1
## 24837 117 87 2 1
## 24838 118 87 2 1
## 24839 119 87 2 1
## 24840 120 87 2 1
## 24841 1 88 2 1
## 24842 2 88 2 1
## 24843 3 88 2 1
## 24844 4 88 2 1
## 24845 5 88 2 1
## 24846 6 88 2 1
## 24847 7 88 2 1
## 24848 8 88 2 1
## 24849 9 88 2 1
## 24850 10 88 2 1
## 24851 11 88 2 1
## 24852 12 88 2 1
## 24853 13 88 2 1
## 24854 14 88 2 1
## 24855 15 88 2 1
## 24856 16 88 2 1
## 24857 17 88 2 1
## 24858 18 88 2 1
## 24859 19 88 2 1
## 24860 20 88 2 1
## 24861 21 88 2 1
## 24862 22 88 2 1
## 24863 23 88 2 1
## 24864 24 88 2 1
## 24865 25 88 2 1
## 24866 26 88 2 1
## 24867 27 88 2 1
## 24868 28 88 2 1
## 24869 29 88 2 1
## 24870 30 88 2 1
## 24871 31 88 2 1
## 24872 32 88 2 1
## 24873 33 88 2 1
## 24874 34 88 2 1
## 24875 35 88 2 1
## 24876 36 88 2 1
## 24877 37 88 2 1
## 24878 38 88 2 1
## 24879 39 88 2 1
## 24880 40 88 2 1
## 24881 41 88 2 1
## 24882 42 88 2 1
## 24883 43 88 2 1
## 24884 44 88 2 1
## 24885 45 88 2 1
## 24886 46 88 2 1
## 24887 47 88 2 1
## 24888 48 88 2 1
## 24889 49 88 2 1
## 24890 50 88 2 1
## 24891 51 88 2 1
## 24892 52 88 2 1
## 24893 53 88 2 1
## 24894 54 88 2 1
## 24895 55 88 2 1
## 24896 56 88 2 1
## 24897 57 88 2 1
## 24898 58 88 2 1
## 24899 59 88 2 1
## 24900 60 88 2 1
## 24901 61 88 2 1
## 24902 62 88 2 1
## 24903 63 88 2 1
## 24904 64 88 2 1
## 24905 65 88 2 1
## 24906 66 88 2 1
## 24907 67 88 2 1
## 24908 68 88 2 1
## 24909 69 88 2 1
## 24910 70 88 2 1
## 24911 71 88 2 1
## 24912 72 88 2 1
## 24913 73 88 2 1
## 24914 74 88 2 1
## 24915 75 88 2 1
## 24916 76 88 2 1
## 24917 77 88 2 1
## 24918 78 88 2 1
## 24919 79 88 2 1
## 24920 80 88 2 1
## 24921 81 88 2 1
## 24922 82 88 2 1
## 24923 83 88 2 1
## 24924 84 88 2 1
## 24925 85 88 2 1
## 24926 86 88 2 1
## 24927 87 88 2 1
## 24928 88 88 2 1
## 24929 89 88 2 1
## 24930 90 88 2 1
## 24931 91 88 2 1
## 24932 92 88 2 1
## 24933 93 88 2 1
## 24934 94 88 2 1
## 24935 95 88 2 1
## 24936 96 88 2 1
## 24937 97 88 2 1
## 24938 98 88 2 1
## 24939 99 88 2 1
## 24940 100 88 2 1
## 24941 101 88 2 1
## 24942 102 88 2 1
## 24943 103 88 2 1
## 24944 104 88 2 1
## 24945 105 88 2 1
## 24946 106 88 2 1
## 24947 107 88 2 1
## 24948 108 88 2 1
## 24949 109 88 2 1
## 24950 110 88 2 1
## 24951 111 88 2 1
## 24952 112 88 2 1
## 24953 113 88 2 1
## 24954 114 88 2 1
## 24955 115 88 2 1
## 24956 116 88 2 1
## 24957 117 88 2 1
## 24958 118 88 2 1
## 24959 119 88 2 1
## 24960 120 88 2 1
## 24961 1 89 2 1
## 24962 2 89 2 1
## 24963 3 89 2 1
## 24964 4 89 2 1
## 24965 5 89 2 1
## 24966 6 89 2 1
## 24967 7 89 2 1
## 24968 8 89 2 1
## 24969 9 89 2 1
## 24970 10 89 2 1
## 24971 11 89 2 1
## 24972 12 89 2 1
## 24973 13 89 2 1
## 24974 14 89 2 1
## 24975 15 89 2 1
## 24976 16 89 2 1
## 24977 17 89 2 1
## 24978 18 89 2 1
## 24979 19 89 2 1
## 24980 20 89 2 1
## 24981 21 89 2 1
## 24982 22 89 2 1
## 24983 23 89 2 1
## 24984 24 89 2 1
## 24985 25 89 2 1
## 24986 26 89 2 1
## 24987 27 89 2 1
## 24988 28 89 2 1
## 24989 29 89 2 1
## 24990 30 89 2 1
## 24991 31 89 2 1
## 24992 32 89 2 1
## 24993 33 89 2 1
## 24994 34 89 2 1
## 24995 35 89 2 1
## 24996 36 89 2 1
## 24997 37 89 2 1
## 24998 38 89 2 1
## 24999 39 89 2 1
## [ reached 'max' / getOption("max.print") -- omitted 12935001 rows ]
Each row is a meta-ecosystem.
It also contains “fake” meta-ecosystems that I created from
unconnected ecosystems
(metaecosystem type = Small-Large unconnected &
metaecosystem type = Medium-Medium unconnected).
Warnings appear after running the following code:
# --- COMPUTE META-ECOSYSTEMS FOR EACH TIME POINT --- #
# Accumulator for per-(ecosystem-pair, time-point) summaries; starts empty
# and is filled as a list of data frames inside the loop below.
ds_metaecosystems <- NULL
# Running row counter, incremented once per (combination, time point) pair.
row_i <- 0
for (combination_i in 1:n_ecosystems_combinations) {
for (time_point_selected in time_points) {
row_i = row_i + 1
current_day = sampling_days[time_point_selected + 1]
current_system_nr = ecos_combin[combination_i, ]$system_nr
current_combination = ecos_combin[combination_i, ]$ecosystems_combined
current_disturbance = ecos_combin[combination_i, ]$disturbance
current_metaeco_type = ecos_combin[combination_i, ]$metaecosystem_type
current_connection = ecos_combin[combination_i, ]$connection
current_IDs = c(ecos_combin[combination_i, ]$ID_first_ecosystem,
ecos_combin[combination_i, ]$ID_second_ecosystem)
if (current_system_nr %in% metaecosystems_to_take_off)
next
if (current_IDs[1] == current_IDs[2])
next
species_vector_two_ecosystems = ds_ecosystems %>%
filter(time_point == time_point_selected,
culture_ID %in% current_IDs) %>%
ungroup() %>%
select(all_of(protist_species_indiv_per_ml))
absence_presence_two_ecosystems <-
ifelse(species_vector_two_ecosystems > 0, 1, 0)
#Alpha diversity: Shannon (mean between the two ecosystems)
shannon_ecosystem_1 = diversity(species_vector_two_ecosystems[1, ], index = "shannon")
shannon_ecosystem_2 = diversity(species_vector_two_ecosystems[2, ], index = "shannon")
shannon_value = (shannon_ecosystem_1 + shannon_ecosystem_2) / 2
#Alpha diversity: Species richness (mean between the two ecosystems)
richness_ecosystem_1 = specnumber(species_vector_two_ecosystems[1, ])
richness_ecosystem_2 = specnumber(species_vector_two_ecosystems[2, ])
mean_richness_value = (richness_ecosystem_1 + richness_ecosystem_2) / 2
#Beta diversity: Jaccard
jaccard_index_value = vegdist(species_vector_two_ecosystems,
method = "jaccard") %>%
as.numeric()
#Beta diversity: Bray Curtis
bray_curtis_value = vegdist(species_vector_two_ecosystems,
method = "bray") %>%
as.numeric()
#Beta diversity: partitioning of beta diversity from Sorensen index into turnover (Simpson pair-wise dissimilarity) and nestedness (nestedness-fraction of Sorensen)
betapart_core_object = betapart.core(absence_presence_two_ecosystems)
beta_spatial_turnover_value = beta.pair(betapart_core_object)$beta.sim %>% as.double()
beta_nestedness_value = beta.pair(betapart_core_object)$beta.sne %>% as.double()
beta_total_value = beta.pair(betapart_core_object)$beta.sor %>% as.double()
#Gamma diversity: Meta-ecosystem richness
metaecosystem_richness_value = colSums(species_vector_two_ecosystems) %>%
specnumber()
#Put everything together
ds_metaecosystems[[row_i]] = ds_ecosystems %>%
filter(culture_ID %in% current_IDs,
time_point == time_point_selected) %>%
summarise(total_metaecosystem_bioarea_mm2 = sum(bioarea_tot_mm2),
total_metaecosystem_Ble_indiv = sum(Ble_tot_indiv),
total_metaecosystem_Cep_indiv = sum(Cep_tot_indiv),
total_metaecosystem_Col_indiv = sum(Col_tot_indiv),
total_metaecosystem_Eug_indiv = sum(Eug_tot_indiv),
total_metaecosystem_Eup_indiv = sum(Eup_tot_indiv),
total_metaecosystem_Lox_indiv = sum(Lox_tot_indiv),
total_metaecosystem_Pau_indiv = sum(Pau_tot_indiv),
total_metaecosystem_Pca_indiv = sum(Pca_tot_indiv),
total_metaecosystem_Spi_indiv = sum(Spi_tot_indiv),
total_metaecosystem_Spi_te_indiv = sum(Spi_te_tot_indiv),
total_metaecosystem_Tet_indiv = sum(Tet_tot_indiv),
total_water_addition_ml = sum(water_addition_ml)) %>%
mutate(system_nr = current_system_nr,
ecosystems_combined = current_combination,
metaecosystem_type = current_metaeco_type,
ecosystem_size_symmetry = case_when(metaecosystem_type == "Small-Large" ~ "asymmetric",
metaecosystem_type == "Medium-Medium" ~ "symmetric",
metaecosystem_type == "Small-Small" ~ "symmetric",
metaecosystem_type == "Large-Large" ~ "symmetric"),
connection = current_connection,
disturbance = current_disturbance,
time_point = time_point_selected,
day = current_day,
jaccard_index = jaccard_index_value,
bray_curtis = bray_curtis_value,
beta_spatial_turnover = beta_spatial_turnover_value,
beta_nestedness = beta_nestedness_value,
beta_total = beta_total_value,
metaecosystem_richness = metaecosystem_richness_value,
mean_shannon = shannon_value,
mean_richness = mean_richness_value) %>%
ungroup()
}
}
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): missing
## values in results
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): missing
## values in results
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): missing
## values in results
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): missing
## values in results
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
## Warning in vegdist(species_vector_two_ecosystems, method = "jaccard"): you have empty rows: their dissimilarities may be
## meaningless in method "jaccard"
## Warning in vegdist(species_vector_two_ecosystems, method = "bray"): you have empty rows: their dissimilarities may be
## meaningless in method "bray"
# Tidy up: collapse the per-iteration list into one data frame (NULL slots
# from skipped combinations are dropped by bind_rows) and order the columns.
species_total_columns = paste0("total_metaecosystem_", protist_species, "_indiv")
ds_metaecosystems = bind_rows(ds_metaecosystems) %>%
  as.data.frame() %>%
  select(time_point,
         day,
         system_nr,
         ecosystems_combined,
         disturbance,
         metaecosystem_type,
         ecosystem_size_symmetry,
         connection,
         mean_shannon,
         mean_richness,
         jaccard_index,
         bray_curtis,
         beta_spatial_turnover,
         beta_nestedness,
         beta_total,
         metaecosystem_richness,
         total_metaecosystem_bioarea_mm2,
         species_total_columns,
         total_water_addition_ml)
# --- TEST CODE --- #
# Sanity check: exactly one row per (ecosystem combination x time point)
expect_equal(nrow(ds_metaecosystems),
             n_ecosystems_combinations * n_time_points)
# Meta-ecosystem types and response variable used by the plots and models below
metaecosystem_type_selected = c("Medium-Medium", "Small-Large")
response_variable_selected = "mean_shannon"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Bold label placed in the top-right corner of a panel
corner_label = function(label_text) {
  annotate("text",
           x = Inf,
           y = Inf,
           label = label_text,
           hjust = 1.1,
           vjust = 1.1,
           size = 5,
           fontface = "bold")
}
# Upper panel: high disturbance (x axis hidden; shown by the lower panel)
p1 = plot.metaecos.points(filter(ds_metaecosystems, disturbance == "high"),
                          metaecosystem_type_selected,
                          response_variable_selected) +
  corner_label("High disturbance") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
# Lower panel: low disturbance
p2 = plot.metaecos.points(filter(ds_metaecosystems, disturbance == "low"),
                          metaecosystem_type_selected,
                          response_variable_selected) +
  theme(legend.position = "none") +
  corner_label("Low disturbance")
# Stack the two panels vertically with a shared legend
ggarrange(p1, p2,
          ncol = 1,
          nrow = 2,
          heights = c(0.9, 1),
          common.legend = TRUE,
          align = "v")
# --- ORIGINAL DATA - PLOT SINGLE REPLICATES --- #
# Bold label placed in the top-right corner of a panel
corner_label = function(label_text) {
  annotate("text",
           x = Inf,
           y = Inf,
           label = label_text,
           hjust = 1.1,
           vjust = 1.1,
           size = 5,
           fontface = "bold")
}
# Upper panel: high disturbance (x axis hidden; shown by the lower panel)
p1 = plot.metaecos.replicates(filter(ds_metaecosystems, disturbance == "high"),
                              metaecosystem_type_selected,
                              response_variable_selected) +
  corner_label("High disturbance") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
# Lower panel: low disturbance
p2 = plot.metaecos.replicates(filter(ds_metaecosystems, disturbance == "low"),
                              metaecosystem_type_selected,
                              response_variable_selected) +
  theme(legend.position = "none") +
  corner_label("Low disturbance")
# Stack the two panels vertically with a shared legend
ggarrange(p1, p2,
          ncol = 1,
          nrow = 2,
          heights = c(0.9, 1),
          common.legend = TRUE,
          align = "v")
# --- PREPARE DATA --- #
# Attach each meta-ecosystem's baseline value of the selected response
# variable (its value at time_point_of_baselines) as a `baseline` column.
baselines = ds_metaecosystems %>%
  filter(time_point == time_point_of_baselines) %>%
  select(system_nr,
         all_of(response_variable_selected)) %>%
  rename(baseline = all_of(response_variable_selected))
# Explicit join key: `system_nr` is the only column shared with `baselines`.
# This silences dplyr's "Joining, by = ..." message and guards against an
# accidental multi-column join if more shared columns are added later.
data_for_analysis = ds_metaecosystems %>%
  left_join(baselines, by = "system_nr")
# Filter to the modelled time points / selected types and shorten level names.
# NOTE(review): the case_when labels ("... unconnected", "... meta-ecosystem")
# do not match the levels kept by the %in% filter above — confirm the intended
# level names; unmatched levels pass through unchanged via TRUE ~ ... .
data_for_analysis = data_for_analysis %>%
  filter(time_point %in% time_points_model,
         metaecosystem_type %in% metaecosystem_type_selected,
         #!is.na(total_water_addition_ml),
         !is.na(!!sym(response_variable_selected))) %>%
  mutate(metaecosystem_type = case_when(metaecosystem_type == "Small-Large unconnected" ~ "SL unc",
                                        metaecosystem_type == "Medium-Medium unconnected" ~ "MM unc",
                                        metaecosystem_type == "Small-Small meta-ecosystem" ~ "SS con",
                                        metaecosystem_type == "Medium-Medium meta-ecosystem" ~ "MM con",
                                        metaecosystem_type == "Large-Large meta-ecosystem" ~ "LL con",
                                        metaecosystem_type == "Small-Large meta-ecosystem" ~ "SL con",
                                        TRUE ~ metaecosystem_type),
         type_conn = paste(metaecosystem_type, connection))
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI OF FILTERED DATA --- #
# Bold label placed in the top-right corner of a panel
corner_label = function(label_text) {
  annotate("text",
           x = Inf,
           y = Inf,
           label = label_text,
           hjust = 1.1,
           vjust = 1.1,
           size = 5,
           fontface = "bold")
}
# Upper panel: high disturbance (x axis hidden; shown by the lower panel)
p1 = plot.metaecos.points(filter(data_for_analysis, disturbance == "high"),
                          metaecosystem_type_selected,
                          response_variable_selected) +
  corner_label("High disturbance") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
# Lower panel: low disturbance
p2 = plot.metaecos.points(filter(data_for_analysis, disturbance == "low"),
                          metaecosystem_type_selected,
                          response_variable_selected) +
  theme(legend.position = "none") +
  corner_label("Low disturbance")
# Stack the two panels vertically
ggarrange(p1, p2,
          ncol = 1,
          nrow = 2,
          heights = c(1, 0.7))
# --- DEFINE MODEL FORMULA --- #
# Fixed effects: meta-ecosystem type/connection x disturbance x day.
# Random effects: random day slopes within baseline and within system_nr.
# NOTE: the name `formula` masks stats::formula() for the rest of the script.
formula = paste("get(response_variable_selected) ~",
                "type_conn * disturbance * day +",
                "(day | baseline) +",
                "(day | system_nr)")
print(formula)
## [1] "get(response_variable_selected) ~ type_conn * disturbance * day + (day | baseline) + (day | system_nr)"
# --- FIND ECOSYSTEM COMBINATIONS FOR ANALYSIS --- #
# Set up parameters
bootstrap_iterations = 1000
# Draw bootstrap rows uniformly with replacement.
# NOTE: this replaces runif(min = 1, max = n) %>% round() followed by
# filter(row_number() %in% draws), which (a) gave the first and last rows
# only half the sampling weight, and (b) silently collapsed duplicated draws,
# so fewer than `bootstrap_iterations` rows could be returned even though the
# model loop below indexes rows 1:bootstrap_iterations.
random_sets_of_sets <- sample(nrow(ecos_combin_unconn_sets_of_sets),
                              size = bootstrap_iterations,
                              replace = TRUE)
# Keep one row per draw (duplicates and draw order preserved)
sets_of_sets_filtered = ecos_combin_unconn_sets_of_sets %>%
  slice(random_sets_of_sets)
# --- DEFINE WHERE TO SAVE (AND FROM WHERE TO LATER READ) THE RESULTS OF THE ANALYSIS --- #
# File name encodes the response variable and the number of bootstrap iterations
results_file_name = paste0("results_metaeco_",
                           response_variable_selected,
                           "_",
                           bootstrap_iterations,
                           "_iterations",
                           ".csv")
file_path = here("3_results", "tables", results_file_name)
# --- FIND MODEL STATISTICS THROUGH MULTIPLE ITERATIONS RESHUFFLING ECOSYSTEM COMBINATIONS --- #
# For each bootstrap iteration: pick one random set of unconnected-system
# combinations per (metaecosystem type x disturbance) cell, refit the mixed
# model on that subset plus all connected systems, and store the
# connected-vs-unconnected emmeans contrasts. Iterations where every
# optimiser fails to converge are counted and skipped.
# Set parameters and initialise
results_table = list()
failed_optimizers = 0
# Fit models
for (comb_i in 1:bootstrap_iterations) {
# Find system numbers of the unconnected systems drawn for this iteration:
# one set per (Small-Large / Medium-Medium) x (low / high disturbance) cell,
# taken from the row of sets_of_sets_filtered matching this iteration.
system_nr_unconn_selected = ecos_combin_unconn_sets %>%
filter(metaecosystem_type == "Small-Large" &
disturbance == "low" &
set == sets_of_sets_filtered[comb_i, ]$set_SL_low |
metaecosystem_type == "Small-Large" &
disturbance == "high" &
set == sets_of_sets_filtered[comb_i, ]$set_SL_high |
metaecosystem_type == "Medium-Medium" &
disturbance == "low" &
set == sets_of_sets_filtered[comb_i, ]$set_MM_low |
metaecosystem_type == "Medium-Medium" &
disturbance == "high" &
set == sets_of_sets_filtered[comb_i, ]$set_MM_high) %>%
pull(system_nr)
# Prepare data for analysis: the drawn unconnected systems plus all connected ones
data_for_analysis_comb_i = data_for_analysis %>%
filter(system_nr %in% system_nr_unconn_selected | connection == "connected")
# Run model (project helper that retries the lmer fit with several optimisers
# and returns NULL when none converges)
model = try.different.optimizers.metaecos(data_for_analysis_comb_i,
formula)
# If all the optimisers fail, move on to the next iteration
if (is.null(model)) {
cat("This model could not be fitted with any optimiser (",
system_nr_unconn_selected,
") \n")
failed_optimizers = failed_optimizers + 1
next
}
# Residuals - show Q-Q plot
# print(qqnorm(resid(model))); print(qqline(resid(model)))
# Residuals - save Residuals vs Fitted plot
# plot = data_for_analysis_comb_i %>%
# mutate(predicted = fitted(model),
# residuals = resid(model)) %>%
# ggplot(aes(x = predicted,
# y = residuals)) +
# geom_point()
#
# ggsave(here("3_results",
# "residual_plots",
# paste0("res_vs_fit_",
# response_variable_selected,
# "_combination_",
# comb_i,
# ".png")),
# plot = plot,
# width = 8,
# height = 6)
# Set up contrasts: estimated marginal means over the 8 cells of
# type_conn x disturbance (day handled as covariate by emmeans defaults)
emmeans_output = emmeans(model,
specs = ~ type_conn * day * disturbance ,
method = "pairwise",
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
# NOTE(review): the summary() value below is discarded inside the loop —
# presumably kept from interactive use; it has no effect on the results.
summary(emmeans_output)
# Indicator vectors selecting single cells of the emmeans reference grid.
# NOTE(review): positions 1..8 assume the grid orders type_conn
# alphabetically (MM con, MM unc, SL con, SL unc) within disturbance
# (high before low) — confirm against summary(emmeans_output).
zeros = rep(0, 8)
high_MM_conn = zeros; high_MM_conn[1] = 1
high_MM_unc = zeros; high_MM_unc[2] = 1
high_SL_conn = zeros; high_SL_conn[3] = 1
high_SL_unc = zeros; high_SL_unc[4] = 1
low_MM_conn = zeros; low_MM_conn[5] = 1
low_MM_unc = zeros; low_MM_unc[6] = 1
low_SL_conn = zeros; low_SL_conn[7] = 1
low_SL_unc = zeros; low_SL_unc[8] = 1
# Save contrasts in a table: connected minus unconnected, per type and
# disturbance, tagged with the iteration number and the drawn system numbers
results_table[[comb_i]] = contrast(
emmeans_output,
method = list("high SL_conn - SL_unc" = high_SL_conn - high_SL_unc,
"high MM_conn - MM_unc" = high_MM_conn - high_MM_unc,
"low SL_conn - SL_unc" = low_SL_conn - low_SL_unc,
"low MM_conn - MM_unc" = low_MM_conn - low_MM_unc)) %>%
as.data.frame() %>%
mutate(combination = comb_i,
system_nr_unconnected_systems = paste(system_nr_unconn_selected, collapse = ", "))
}
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1012 1019 1021 1028 1035 1037 1044 1046 1055 1056 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1014 1020 1023 1029 1035 1037 1043 1052 1047 1056 1063 )
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1011 1017 1023 1027 1034 1038 1041 1050 1049 1056 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1012 1016 1024 1028 1035 1039 1042 1050 1055 1056 1064 )
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1014 1018 1025 1026 1032 1038 1045 1047 1051 1057 1061 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1008 1012 1020 1024 1027 1033 1039 1041 1051 1054 1057 1061 )
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1014 1016 1022 1029 1032 1038 1045 1050 1055 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1012 1019 1023 1030 1031 1038 1044 1053 1049 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1011 1019 1023 1027 1035 1039 1041 1055 1050 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1006 1015 1018 1022 1026 1032 1039 1045 1052 1047 1057 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1014 1020 1021 1026 1032 1039 1043 1052 1048 1057 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1013 1020 1021 1027 1031 1039 1043 1048 1050 1058 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1008 1014 1020 1022 1029 1032 1040 1041 1054 1051 1058 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1015 1018 1022 1026 1033 1040 1042 1053 1052 1058 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1015 1016 1022 1027 1031 1040 1044 1047 1052 1058 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1011 1017 1025 1029 1032 1036 1043 1048 1052 1058 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1012 1020 1021 1030 1032 1036 1044 1047 1051 1059 1060 )
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1012 1016 1025 1027 1033 1039 1045 1047 1055 1059 1060 )
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1008 1012 1016 1025 1030 1032 1038 1044 1050 1048 1059 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1008 1012 1020 1024 1027 1035 1038 1044 1046 1055 1059 1061 )
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1011 1019 1025 1028 1031 1040 1042 1052 1048 1059 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1007 1011 1020 1023 1029 1032 1040 1043 1046 1055 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1013 1016 1022 1026 1034 1040 1043 1053 1052 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1006 1012 1018 1024 1028 1034 1040 1042 1050 1049 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1015 1017 1024 1028 1032 1036 1045 1052 1053 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1008 1015 1017 1024 1028 1031 1039 1045 1051 1047 1061 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1010 1014 1017 1021 1029 1033 1040 1042 1052 1048 1061 1057 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1011 1019 1025 1028 1032 1036 1045 1047 1051 1061 1059 )
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1010 1012 1018 1021 1026 1033 1039 1045 1047 1055 1061 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1006 1012 1018 1025 1027 1031 1040 1044 1051 1047 1061 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1008 1011 1017 1025 1028 1035 1039 1041 1051 1054 1061 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1015 1019 1022 1029 1035 1038 1042 1052 1048 1061 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1011 1018 1025 1027 1031 1040 1043 1049 1051 1061 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1013 1016 1025 1028 1034 1036 1045 1054 1048 1061 1064 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1013 1019 1025 1028 1034 1040 1041 1054 1051 1061 1064 )
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1007 1015 1018 1021 1027 1034 1038 1041 1055 1047 1061 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1011 1019 1023 1026 1034 1038 1042 1047 1055 1062 1057 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1015 1018 1024 1028 1035 1036 1042 1048 1050 1062 1057 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1011 1020 1024 1028 1035 1037 1044 1048 1050 1062 1057 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1011 1020 1024 1030 1033 1037 1044 1048 1054 1062 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1012 1016 1025 1026 1033 1037 1045 1051 1047 1062 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1007 1011 1018 1025 1029 1032 1038 1045 1052 1048 1062 1058 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1011 1020 1022 1026 1034 1038 1045 1053 1046 1062 1058 )
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1015 1018 1024 1028 1031 1037 1045 1053 1052 1062 1058 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1006 1014 1017 1023 1027 1034 1036 1043 1053 1046 1062 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1007 1015 1018 1021 1028 1032 1039 1045 1055 1050 1062 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1014 1020 1022 1029 1033 1040 1042 1047 1055 1063 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1015 1019 1021 1026 1032 1040 1044 1048 1052 1063 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1006 1012 1019 1023 1026 1035 1039 1043 1047 1051 1063 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1013 1019 1025 1027 1033 1040 1041 1047 1051 1063 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1006 1012 1019 1023 1029 1031 1038 1045 1050 1049 1063 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1014 1017 1021 1029 1035 1036 1043 1055 1050 1063 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1012 1016 1024 1028 1032 1039 1041 1046 1054 1063 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1013 1020 1021 1026 1034 1040 1043 1049 1051 1064 1058 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1007 1013 1020 1021 1026 1034 1040 1043 1048 1054 1065 1056 )
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1015 1019 1022 1028 1034 1040 1042 1054 1046 1065 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1014 1020 1021 1026 1034 1037 1043 1047 1051 1065 1060 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1015 1018 1022 1026 1033 1037 1045 1047 1055 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1010 1011 1018 1022 1026 1033 1037 1044 1049 1051 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1014 1016 1022 1030 1033 1036 1042 1053 1052 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1014 1017 1023 1029 1035 1036 1043 1055 1050 1065 1060 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1015 1018 1024 1029 1032 1038 1041 1055 1050 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- REPORT FAILED OPTIMIZER RUNS --- #
# Percentage of bootstrap iterations whose model could not be fitted by
# any optimizer (failed_optimizers and bootstrap_iterations are defined
# earlier in the script).
percentage_failed <- failed_optimizers / bootstrap_iterations * 100
print(paste(percentage_failed,
            "% of combinations created models that could not be fit"))
## [1] "6.2 % of combinations created models that could not be fit"
# --- COLLECT, PERSIST AND RELOAD BOOTSTRAP RESULTS --- #
# Collapse the per-iteration list of result tables into one data frame
results_table <- results_table %>%
  bind_rows()
# Save the results. row.names = FALSE avoids writing a spurious index
# column; spelled out as FALSE because the shorthand F is reassignable.
write.csv(results_table,
          file = file_path,
          row.names = FALSE)
# --- READ THE RESULTS AFTER LENGTHY COMPUTATION --- #
# Re-read from disk so downstream code behaves identically whether or
# not the lengthy computation above was re-run in this session
results_table <- read.csv(file = file_path)
# --- SHOW ITERATED RESULTS - P VALUE DISTRIBUTIONS --- #
# One histogram of bootstrapped p values per contrast. Deriving each
# title from the contrast label removes the copy-paste duplication and
# also makes the fourth title consistent with the first three (the
# original hard-coded "vs" there instead of "-").
for (contrast_selected in c("high MM_conn - MM_unc",
                            "high SL_conn - SL_unc",
                            "low MM_conn - MM_unc",
                            "low SL_conn - SL_unc")) {
  results_table %>%
    filter(contrast == contrast_selected) %>%
    pull(p.value) %>%
    hist(main = paste("p values", contrast_selected))
}
# --- SHOW ITERATED RESULTS - TABLE WITH AVERAGED VALUES --- #
# Summarise the bootstrap distribution of each contrast by its median,
# then attach a significance label. case_when() replaces the original
# cascade of ifelse() calls, which left `evidence` as an empty string
# when a rounded p value equalled 0.1 exactly (neither > 0.1 nor < 0.1).
results_table %>%
  group_by(contrast) %>%
  summarise(estimate = median(estimate),
            SE = median(SE),
            df = median(df),
            t.ratio = median(t.ratio),
            p.value = median(p.value)) %>%
  as.data.frame() %>%
  mutate(across(c(estimate, SE, df, t.ratio, p.value),
                ~ round(.x, digits = 3)),
         # Thresholds checked from smallest upwards; first match wins
         evidence = case_when(p.value < 0.001 ~ "**** very strong",
                              p.value < 0.01 ~ "*** strong",
                              p.value < 0.05 ~ "** moderate",
                              p.value < 0.1 ~ "* weak",
                              TRUE ~ "none"),
         # Report very small p values as a bound, not a rounded zero
         p.value = ifelse(p.value < 0.001,
                          "< 0.001",
                          p.value))
## contrast estimate SE df t.ratio p.value evidence
## 1 high MM_conn - MM_unc 0.115 0.078 37.56 1.497 0.142 none
## 2 high SL_conn - SL_unc 0.190 0.063 37.56 3.034 0.004 *** strong
## 3 low MM_conn - MM_unc -0.003 0.078 37.56 -0.034 0.933 none
## 4 low SL_conn - SL_unc 0.182 0.059 37.56 3.075 0.004 *** strong
# Response variable analysed in this section: community dissimilarity
response_variable_selected <- "bray_curtis"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Upper panel: high-disturbance meta-ecosystems. The x axis title and
# text are suppressed so the panel sits flush above the lower one.
panel_high <- plot.metaecos.points(ds_metaecosystems %>%
                                     filter(disturbance == "high"),
                                   metaecosystem_type_selected,
                                   response_variable_selected) +
  annotate("text",
           x = Inf,
           y = Inf,
           label = "High disturbance",
           hjust = 1.1,
           vjust = 1.1,
           size = 5,
           fontface = "bold") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
# Lower panel: low-disturbance meta-ecosystems
panel_low <- plot.metaecos.points(ds_metaecosystems %>%
                                    filter(disturbance == "low"),
                                  metaecosystem_type_selected,
                                  response_variable_selected) +
  theme(legend.position = "none") +
  annotate("text",
           x = Inf,
           y = Inf,
           label = "Low disturbance",
           hjust = 1.1,
           vjust = 1.1,
           size = 5,
           fontface = "bold")
# Stack the two panels vertically and share a single legend
ggarrange(panel_high,
          panel_low,
          ncol = 1,
          nrow = 2,
          heights = c(0.9, 1),
          common.legend = TRUE,
          align = "v")
# --- ORIGINAL DATA - PLOT SINGLE REPLICATES --- #
# Same two-panel layout as above, but showing every replicate's
# trajectory instead of the mean ± 95% CI.
panel_high <- plot.metaecos.replicates(ds_metaecosystems %>%
                                         filter(disturbance == "high"),
                                       metaecosystem_type_selected,
                                       response_variable_selected) +
  annotate("text",
           x = Inf,
           y = Inf,
           label = "High disturbance",
           hjust = 1.1,
           vjust = 1.1,
           size = 5,
           fontface = "bold") +
  # Hide the x axis so this panel aligns flush with the one below
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
panel_low <- plot.metaecos.replicates(ds_metaecosystems %>%
                                        filter(disturbance == "low"),
                                      metaecosystem_type_selected,
                                      response_variable_selected) +
  theme(legend.position = "none") +
  annotate("text",
           x = Inf,
           y = Inf,
           label = "Low disturbance",
           hjust = 1.1,
           vjust = 1.1,
           size = 5,
           fontface = "bold")
# Stack the two panels vertically and share a single legend
ggarrange(panel_high,
          panel_low,
          ncol = 1,
          nrow = 2,
          heights = c(0.9, 1),
          common.legend = TRUE,
          align = "v")
# --- PREPARE DATA --- #
# Add baselines: for every meta-ecosystem (system_nr), the value of the
# selected response variable at the baseline time point
baselines <- ds_metaecosystems %>%
  filter(time_point == time_point_of_baselines) %>%
  select(system_nr,
         all_of(response_variable_selected)) %>%
  rename(baseline = all_of(response_variable_selected))
# Join explicitly by system_nr instead of relying on left_join's silent
# default key detection. Assumes ds_metaecosystems has no pre-existing
# `baseline` column (otherwise the default join key would have included
# it) — TODO confirm.
data_for_analysis <- ds_metaecosystems %>%
  left_join(baselines, by = "system_nr")
# Keep only the modelled time points and treatments, drop missing
# responses, and shorten treatment-level names for readable contrasts
data_for_analysis <- data_for_analysis %>%
  filter(time_point %in% time_points_model,
         metaecosystem_type %in% metaecosystem_type_selected,
         #!is.na(total_water_addition_ml),
         !is.na(!!sym(response_variable_selected))) %>%
  mutate(metaecosystem_type = case_when(metaecosystem_type == "Small-Large unconnected" ~ "SL unc",
                                        metaecosystem_type == "Medium-Medium unconnected" ~ "MM unc",
                                        metaecosystem_type == "Small-Small meta-ecosystem" ~ "SS con",
                                        metaecosystem_type == "Medium-Medium meta-ecosystem" ~ "MM con",
                                        metaecosystem_type == "Large-Large meta-ecosystem" ~ "LL con",
                                        metaecosystem_type == "Small-Large meta-ecosystem" ~ "SL con",
                                        TRUE ~ metaecosystem_type),
         type_conn = paste(metaecosystem_type, connection))
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI OF FILTERED DATA --- #
# Top panel: high-disturbance systems; x-axis labelling removed because it is
# shared with the bottom panel
p1 <- data_for_analysis %>%
  filter(disturbance == "high") %>%
  plot.metaecos.points(metaecosystem_type_selected,
                       response_variable_selected) +
  annotate("text",
           x = Inf, y = Inf,
           label = "High disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
# Bottom panel: low-disturbance systems, legend suppressed
p2 <- data_for_analysis %>%
  filter(disturbance == "low") %>%
  plot.metaecos.points(metaecosystem_type_selected,
                       response_variable_selected) +
  theme(legend.position = "none") +
  annotate("text",
           x = Inf, y = Inf,
           label = "Low disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold")
# Arrange the two panels vertically
ggarrange(p1, p2,
          ncol = 1, nrow = 2,
          heights = c(1, 0.7))
# --- DEFINE MODEL FORMULA --- #
# Fixed effects: type/connection x disturbance x day; random slopes of day
# within baseline and within system. Kept as a string — the fitting helper
# converts it to a formula, and get() resolves the selected response column.
formula <- paste("get(response_variable_selected) ~",
                 "type_conn * disturbance * day +",
                 "(day | baseline) +",
                 "(day | system_nr)")
print(formula)
## [1] "get(response_variable_selected) ~ type_conn * disturbance * day + (day | baseline) + (day | system_nr)"
# --- FIND ECOSYSTEM COMBINATIONS FOR ANALYSIS --- #
# Set up parameters
bootstrap_iterations = 1000
# Draw bootstrap_iterations row indices WITH replacement.
# NOTE: the previous runif() + round() scheme under-weighted the first and last
# rows (their rounding intervals are half-width), and feeding the draws through
# filter(row_number() %in% draws) collapsed duplicate draws — so fewer than
# bootstrap_iterations rows survived and the fitting loop below indexed past
# the end of the table, yielding NA set numbers. sample() + slice() keeps
# exactly one row per draw, duplicates included.
random_sets_of_sets <- sample(seq_len(nrow(ecos_combin_unconn_sets_of_sets)),
                              size = bootstrap_iterations,
                              replace = TRUE)
# Pick the sampled combinations: one row per bootstrap iteration, in draw order
sets_of_sets_filtered = ecos_combin_unconn_sets_of_sets %>%
  slice(random_sets_of_sets)
# --- DEFINE WHERE TO SAVE (AND FROM WHERE TO LATER READ) THE RESULTS OF THE ANALYSIS --- #
# One results file per response variable and bootstrap size
results_file_name <- sprintf("results_metaeco_%s_%s_iterations.csv",
                             response_variable_selected,
                             bootstrap_iterations)
file_path = here("3_results", "tables", results_file_name)
# --- FIND MODEL STATISTICS THROUGH MULTIPLE ITERATIONS RESHUFFLING ECOSYSTEM COMBINATIONS --- #
# For each bootstrap iteration: pick the pre-computed sets of unconnected
# ecosystem combinations drawn for this iteration (one set per meta-ecosystem
# type x disturbance cell), refit the mixed model on those unconnected systems
# plus ALL connected systems, and store the connected-vs-unconnected emmeans
# contrasts. Iterations whose model cannot be fitted are counted and skipped,
# leaving a NULL slot in results_table.
# Set parameters and initialise
results_table = list()
failed_optimizers = 0
# Fit models
for (comb_i in 1:bootstrap_iterations) {
# Find system numbers of the unconnected systems belonging to the sets drawn
# for this iteration (SL/MM x low/high cells of sets_of_sets_filtered)
system_nr_unconn_selected = ecos_combin_unconn_sets %>%
filter(metaecosystem_type == "Small-Large" &
disturbance == "low" &
set == sets_of_sets_filtered[comb_i, ]$set_SL_low |
metaecosystem_type == "Small-Large" &
disturbance == "high" &
set == sets_of_sets_filtered[comb_i, ]$set_SL_high |
metaecosystem_type == "Medium-Medium" &
disturbance == "low" &
set == sets_of_sets_filtered[comb_i, ]$set_MM_low |
metaecosystem_type == "Medium-Medium" &
disturbance == "high" &
set == sets_of_sets_filtered[comb_i, ]$set_MM_high) %>%
pull(system_nr)
# Prepare data for analysis: the selected unconnected systems plus every
# connected system (connected systems are identical across iterations)
data_for_analysis_comb_i = data_for_analysis %>%
filter(system_nr %in% system_nr_unconn_selected | connection == "connected")
# Run model; the helper tries several lme4 optimisers in turn and returns
# NULL when none converges (formula is the string defined above)
model = try.different.optimizers.metaecos(data_for_analysis_comb_i,
formula)
# If all the optimisers fail, count the failure and move on to the next
# iteration (results_table keeps a NULL slot at this index)
if (is.null(model)) {
cat("This model could not be fitted with any optimiser (",
system_nr_unconn_selected,
") \n")
failed_optimizers = failed_optimizers + 1
next
}
# Residuals - show Q-Q plot
# print(qqnorm(resid(model))); print(qqline(resid(model)))
# Residuals - save Residuals vs Fitted plot
# plot = data_for_analysis_comb_i %>%
# mutate(predicted = fitted(model),
# residuals = resid(model)) %>%
# ggplot(aes(x = predicted,
# y = residuals)) +
# geom_point()
#
# ggsave(here("3_results",
# "residual_plots",
# paste0("res_vs_fit_",
# response_variable_selected,
# "_combination_",
# comb_i,
# ".png")),
# plot = plot,
# width = 8,
# height = 6)
# Set up contrasts: estimated marginal means over the 8 type_conn x
# disturbance cells (day is a numeric covariate, averaged over)
emmeans_output = emmeans(model,
specs = ~ type_conn * day * disturbance ,
method = "pairwise",
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
# NOTE(review): this summary() result is discarded (not printed or stored)
summary(emmeans_output)
# Indicator vectors selecting single cells of the emmeans grid.
# Assumes the 8 cells are ordered with type_conn varying fastest
# (MM con, MM unc, SL con, SL unc) within disturbance (high, then low)
# — TODO confirm against summary(emmeans_output) ordering.
zeros = rep(0, 8)
high_MM_conn = zeros; high_MM_conn[1] = 1
high_MM_unc = zeros; high_MM_unc[2] = 1
high_SL_conn = zeros; high_SL_conn[3] = 1
high_SL_unc = zeros; high_SL_unc[4] = 1
low_MM_conn = zeros; low_MM_conn[5] = 1
low_MM_unc = zeros; low_MM_unc[6] = 1
low_SL_conn = zeros; low_SL_conn[7] = 1
low_SL_unc = zeros; low_SL_unc[8] = 1
# Save contrasts in a table: connected minus unconnected within each
# disturbance level and meta-ecosystem type, tagged with the iteration
# number and the unconnected systems used
results_table[[comb_i]] = contrast(
emmeans_output,
method = list("high SL_conn - SL_unc" = high_SL_conn - high_SL_unc,
"high MM_conn - MM_unc" = high_MM_conn - high_MM_unc,
"low SL_conn - SL_unc" = low_SL_conn - low_SL_unc,
"low MM_conn - MM_unc" = low_MM_conn - low_MM_unc)) %>%
as.data.frame() %>%
mutate(combination = comb_i,
system_nr_unconnected_systems = paste(system_nr_unconn_selected, collapse = ", "))
}
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1014 1018 1025 1026 1032 1040 1043 1051 1047 1056 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1013 1019 1025 1030 1032 1038 1041 1055 1050 1056 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1014 1016 1022 1029 1031 1037 1045 1050 1055 1056 1065 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1008 1014 1017 1025 1030 1033 1039 1042 1052 1047 1056 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1014 1016 1022 1030 1031 1037 1043 1053 1052 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1014 1018 1021 1027 1033 1039 1045 1055 1050 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1014 1020 1021 1026 1035 1039 1042 1049 1050 1057 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1014 1016 1025 1030 1034 1038 1042 1049 1053 1058 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1011 1017 1023 1027 1034 1038 1041 1052 1047 1058 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1010 1011 1019 1022 1028 1031 1039 1045 1048 1052 1058 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1015 1019 1021 1027 1031 1040 1043 1050 1048 1058 1064 )
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1012 1018 1021 1028 1035 1037 1041 1052 1047 1058 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1010 1011 1017 1023 1026 1033 1040 1044 1054 1051 1059 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1014 1020 1022 1030 1033 1036 1044 1047 1051 1059 1061 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1006 1013 1017 1025 1030 1033 1037 1044 1050 1055 1059 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1014 1018 1022 1026 1035 1039 1043 1050 1055 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1007 1013 1016 1025 1029 1031 1038 1042 1055 1047 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1014 1017 1021 1026 1032 1038 1045 1053 1046 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1014 1018 1025 1030 1031 1037 1044 1053 1052 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1011 1017 1025 1030 1034 1036 1042 1051 1054 1061 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1010 1012 1019 1021 1027 1034 1038 1041 1054 1046 1062 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1010 1011 1019 1023 1026 1034 1038 1042 1050 1048 1063 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1013 1019 1025 1028 1032 1036 1044 1053 1046 1063 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1011 1019 1025 1029 1033 1040 1042 1050 1048 1064 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1014 1018 1025 1029 1032 1036 1045 1050 1055 1064 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1008 1015 1016 1022 1027 1035 1039 1043 1051 1049 1064 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1007 1013 1020 1021 1030 1034 1037 1041 1054 1051 1064 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1010 1014 1018 1021 1027 1033 1040 1041 1047 1055 1064 1058 )
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1013 1019 1021 1028 1032 1039 1045 1049 1050 1064 1058 )
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1014 1016 1022 1029 1035 1036 1042 1053 1046 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1010 1012 1016 1023 1029 1035 1037 1041 1055 1047 1065 1060 )
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- REPORT FAILED FITS AND PERSIST THE BOOTSTRAP RESULTS --- #
# Report which share of bootstrap combinations produced unfittable models
print(paste((failed_optimizers/bootstrap_iterations * 100),
            "% of combinations created models that could not be fit"))
# Collapse the per-iteration list of contrast tables into a single data frame
results_table = bind_rows(results_table)
# Persist the results so the lengthy computation can be skipped on reruns
write.csv(results_table,
          file = file_path,
          row.names = FALSE)
# --- READ THE RESULTS AFTER LENGTHY COMPUTATION --- #
# Reload the saved contrast tables; identical whether or not the bootstrap
# loop above was re-run in this session
results_table = read.csv(file_path)
# --- SHOW ITERATED RESULTS - P VALUE DISTRIBUTIONS --- #
# One histogram of bootstrapped p values per contrast. The title is derived
# from the contrast name itself so labels cannot drift out of sync (the
# previous hard-coded titles labelled the last plot "vs" instead of "-").
for (contrast_selected in c("high MM_conn - MM_unc",
                            "high SL_conn - SL_unc",
                            "low MM_conn - MM_unc",
                            "low SL_conn - SL_unc")) {
  results_table %>%
    filter(contrast == contrast_selected) %>%
    pull(p.value) %>%
    hist(main = paste("p values", contrast_selected))
}
# --- SHOW ITERATED RESULTS - TABLE WITH AVERAGED VALUES --- #
# Summarise the bootstrap distribution of each contrast by its median and
# attach a qualitative evidence label based on the median p value.
results_table %>%
  group_by(contrast) %>%
  summarise(estimate = median(estimate),
            SE = median(SE),
            df = median(df),
            t.ratio = median(t.ratio),
            p.value = median(p.value)) %>%
  as.data.frame() %>%
  # Round all reported statistics to 3 decimals before labelling
  mutate(across(c(estimate, SE, df, t.ratio, p.value),
                ~ round(.x, digits = 3)),
         # case_when evaluates top-down, so the strongest evidence wins.
         # Unlike the previous ifelse cascade (which tested > 0.1 and then
         # < 0.1), this also labels a rounded p value of exactly 0.1
         # ("none") instead of leaving the evidence column empty.
         evidence = case_when(p.value < 0.001 ~ "**** very strong",
                              p.value < 0.01 ~ "*** strong",
                              p.value < 0.05 ~ "** moderate",
                              p.value < 0.1 ~ "* weak",
                              TRUE ~ "none"),
         # Report very small p values as "< 0.001" rather than 0
         p.value = ifelse(p.value < 0.001,
                          "< 0.001",
                          p.value))
## contrast estimate SE df t.ratio p.value evidence
## 1 high MM_conn - MM_unc 0.064 0.045 44.803 1.377 0.174 none
## 2 high SL_conn - SL_unc -0.124 0.036 44.803 -3.407 0.001 *** strong
## 3 low MM_conn - MM_unc 0.065 0.045 44.803 1.432 0.157 none
## 4 low SL_conn - SL_unc -0.129 0.034 44.803 -3.800 < 0.001 **** very strong
# Response variable analysed in this section: meta-ecosystem species richness
response_variable_selected = "metaecosystem_richness"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Top panel: high disturbance (x axis suppressed so the panels share it
# visually); bottom panel: low disturbance. Panels are stacked with a
# common legend.
plot_high = plot.metaecos.points(ds_metaecosystems %>% filter(disturbance == "high"),
                                 metaecosystem_type_selected,
                                 response_variable_selected) +
  annotate("text",
           x = Inf, y = Inf,
           label = "High disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
plot_low = plot.metaecos.points(ds_metaecosystems %>% filter(disturbance == "low"),
                                metaecosystem_type_selected,
                                response_variable_selected) +
  theme(legend.position = "none") +
  annotate("text",
           x = Inf, y = Inf,
           label = "Low disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold")
# Stack the two panels vertically, sharing one legend
ggarrange(plot_high,
          plot_low,
          ncol = 1, nrow = 2,
          heights = c(0.9, 1),
          common.legend = TRUE,
          align = "v")
# --- ORIGINAL DATA - PLOT SINGLE REPLICATES --- #
# Same layout as the mean ± CI figure, but showing each replicate's
# trajectory instead of the aggregated values.
plot_high = plot.metaecos.replicates(ds_metaecosystems %>% filter(disturbance == "high"),
                                     metaecosystem_type_selected,
                                     response_variable_selected) +
  annotate("text",
           x = Inf, y = Inf,
           label = "High disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
plot_low = plot.metaecos.replicates(ds_metaecosystems %>% filter(disturbance == "low"),
                                    metaecosystem_type_selected,
                                    response_variable_selected) +
  theme(legend.position = "none") +
  annotate("text",
           x = Inf, y = Inf,
           label = "Low disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold")
# Stack the two panels vertically, sharing one legend
ggarrange(plot_high,
          plot_low,
          ncol = 1, nrow = 2,
          heights = c(0.9, 1),
          common.legend = TRUE,
          align = "v")
# --- PREPARE DATA --- #
# Baseline = each system's response value at the baseline time point,
# carried along as a column so the model can group on it.
baselines = ds_metaecosystems %>%
  filter(time_point == time_point_of_baselines) %>%
  select(system_nr,
         all_of(response_variable_selected)) %>%
  rename(baseline = all_of(response_variable_selected))
# Attach baselines, restrict to the modelled time points and selected
# meta-ecosystem types, drop missing responses, and shorten type labels
data_for_analysis = ds_metaecosystems %>%
  left_join(baselines) %>%
  filter(time_point %in% time_points_model,
         metaecosystem_type %in% metaecosystem_type_selected,
         #!is.na(total_water_addition_ml),
         !is.na(!!sym(response_variable_selected))) %>%
  mutate(metaecosystem_type = case_when(metaecosystem_type == "Small-Large unconnected" ~ "SL unc",
                                        metaecosystem_type == "Medium-Medium unconnected" ~ "MM unc",
                                        metaecosystem_type == "Small-Small meta-ecosystem" ~ "SS con",
                                        metaecosystem_type == "Medium-Medium meta-ecosystem" ~ "MM con",
                                        metaecosystem_type == "Large-Large meta-ecosystem" ~ "LL con",
                                        metaecosystem_type == "Small-Large meta-ecosystem" ~ "SL con",
                                        TRUE ~ metaecosystem_type),
         type_conn = paste(metaecosystem_type, connection))
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI OF FILTERED DATA --- #
# Same two-panel layout as above, but drawn from the filtered analysis data
# to verify what actually enters the model.
plot_high = plot.metaecos.points(data_for_analysis %>% filter(disturbance == "high"),
                                 metaecosystem_type_selected,
                                 response_variable_selected) +
  annotate("text",
           x = Inf, y = Inf,
           label = "High disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
plot_low = plot.metaecos.points(data_for_analysis %>% filter(disturbance == "low"),
                                metaecosystem_type_selected,
                                response_variable_selected) +
  theme(legend.position = "none") +
  annotate("text",
           x = Inf, y = Inf,
           label = "Low disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold")
# Stack the two panels vertically
ggarrange(plot_high,
          plot_low,
          ncol = 1, nrow = 2,
          heights = c(1, 0.7))
# --- DEFINE MODEL FORMULA --- #
# Fixed effects: type_conn x disturbance x day; random day slopes by
# baseline and by system. NOTE: this is a character string (not a formula
# object) and the name shadows stats::formula; kept because the downstream
# fitting helper expects it.
formula = paste("get(response_variable_selected) ~",
                "type_conn * disturbance * day +",
                "(day | baseline) +",
                "(day | system_nr)")
print(formula)
## [1] "get(response_variable_selected) ~ type_conn * disturbance * day + (day | baseline) + (day | system_nr)"
# --- FIND ECOSYSTEM COMBINATIONS FOR ANALYSIS --- #
# Set up parameters
bootstrap_iterations = 1000
# Draw exactly bootstrap_iterations rows, with replacement, from the table
# of unconnected-set combinations. sample() over seq_len() gives every row
# equal probability. The previous runif() + round() + filter(row_number()
# %in% ...) approach half-weighted the first and last rows and, because
# %in% deduplicates, returned FEWER than bootstrap_iterations rows, so
# later loop iterations indexed past the end of the table (NA rows).
sampled_rows = sample(seq_len(nrow(ecos_combin_unconn_sets_of_sets)),
                      size = bootstrap_iterations,
                      replace = TRUE)
sets_of_sets_filtered = ecos_combin_unconn_sets_of_sets %>%
  slice(sampled_rows)
# --- DEFINE WHERE TO SAVE (AND FROM WHERE TO LATER READ) THE RESULTS OF THE ANALYSIS --- #
file_path = here("3_results", "tables",
                 paste0("results_metaeco_",
                        response_variable_selected,
                        "_",
                        bootstrap_iterations,
                        "_iterations",
                        ".csv"))
# --- FIND MODEL STATISTICS THROUGH MULTIPLE ITERATIONS RESHUFFLING ECOSYSTEM COMBINATIONS --- #
# For each bootstrap iteration: take one pre-drawn set of unconnected
# ecosystem combinations, refit the mixed model on those unconnected systems
# plus all connected systems, and store the four connected-vs-unconnected
# contrasts. Iterations whose model cannot be fitted by any optimiser are
# counted in failed_optimizers and skipped.
# Set parameters and initialise
results_table = list()
failed_optimizers = 0
# Fit models
for (comb_i in 1:bootstrap_iterations) {
# Find system numbers
# Select the unconnected systems whose set number matches this iteration's
# bootstrap draw, separately for each metaecosystem type x disturbance level
system_nr_unconn_selected = ecos_combin_unconn_sets %>%
filter(metaecosystem_type == "Small-Large" &
disturbance == "low" &
set == sets_of_sets_filtered[comb_i, ]$set_SL_low |
metaecosystem_type == "Small-Large" &
disturbance == "high" &
set == sets_of_sets_filtered[comb_i, ]$set_SL_high |
metaecosystem_type == "Medium-Medium" &
disturbance == "low" &
set == sets_of_sets_filtered[comb_i, ]$set_MM_low |
metaecosystem_type == "Medium-Medium" &
disturbance == "high" &
set == sets_of_sets_filtered[comb_i, ]$set_MM_high) %>%
pull(system_nr)
# Prepare data for analysis
# Keep the sampled unconnected systems plus every connected system
data_for_analysis_comb_i = data_for_analysis %>%
filter(system_nr %in% system_nr_unconn_selected | connection == "connected")
# Run model
model = try.different.optimizers.metaecos(data_for_analysis_comb_i,
formula)
# If all the optimisers fail, move on to the next iteration
if (is.null(model)) {
cat("This model could not be fitted with any optimiser (",
system_nr_unconn_selected,
") \n")
failed_optimizers = failed_optimizers + 1
next
}
# Residuals - show Q-Q plot
# print(qqnorm(resid(model))); print(qqline(resid(model)))
# Residuals - save Residuals vs Fitted plot
# plot = data_for_analysis_comb_i %>%
# mutate(predicted = fitted(model),
# residuals = resid(model)) %>%
# ggplot(aes(x = predicted,
# y = residuals)) +
# geom_point()
#
# ggsave(here("3_results",
# "residual_plots",
# paste0("res_vs_fit_",
# response_variable_selected,
# "_combination_",
# comb_i,
# ".png")),
# plot = plot,
# width = 8,
# height = 6)
# Set up contrasts
# Estimated marginal means over the type_conn x day x disturbance grid,
# with Satterthwaite degrees of freedom
emmeans_output = emmeans(model,
specs = ~ type_conn * day * disturbance ,
method = "pairwise",
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
# NOTE(review): the summary() call below is not assigned and, inside a for
# loop, does not auto-print, so it appears to have no effect - confirm
summary(emmeans_output)
# Indicator vectors, each selecting one of the 8 cells of the emmeans grid.
# NOTE(review): positions 1-8 assume emmeans orders the cells
# MM con, MM unc, SL con, SL unc within high then low disturbance -
# verify against summary(emmeans_output) before trusting the contrasts
zeros = rep(0, 8)
high_MM_conn = zeros; high_MM_conn[1] = 1
high_MM_unc = zeros; high_MM_unc[2] = 1
high_SL_conn = zeros; high_SL_conn[3] = 1
high_SL_unc = zeros; high_SL_unc[4] = 1
low_MM_conn = zeros; low_MM_conn[5] = 1
low_MM_unc = zeros; low_MM_unc[6] = 1
low_SL_conn = zeros; low_SL_conn[7] = 1
low_SL_unc = zeros; low_SL_unc[8] = 1
# Save contrasts in a table
# Each contrast is the difference between a connected and an unconnected
# cell; the iteration index and the sampled system numbers are recorded so
# the bootstrap draw behind each row can be reconstructed later
results_table[[comb_i]] = contrast(
emmeans_output,
method = list("high SL_conn - SL_unc" = high_SL_conn - high_SL_unc,
"high MM_conn - MM_unc" = high_MM_conn - high_MM_unc,
"low SL_conn - SL_unc" = low_SL_conn - low_SL_unc,
"low MM_conn - MM_unc" = low_MM_conn - low_MM_unc)) %>%
as.data.frame() %>%
mutate(combination = comb_i,
system_nr_unconnected_systems = paste(system_nr_unconn_selected, collapse = ", "))
}
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- REPORT FAILED FITS AND SAVE BOOTSTRAP RESULTS --- #
# Report the share of bootstrap iterations whose model could not be fitted
# by any optimizer (failed_optimizers is counted inside the fitting loop).
print(paste((failed_optimizers/bootstrap_iterations * 100),
            "% of combinations created models that could not be fit"))
# Collapse the per-iteration list of contrast tables into one data frame
results_table = results_table %>%
  bind_rows()
# Persist the results so the lengthy computation can be skipped on later
# runs (the file is read back from `file_path` below).
write.csv(results_table,
          file = file_path,
          row.names = FALSE)  # spell out FALSE: T/F are reassignable in R
# --- READ THE RESULTS AFTER LENGTHY COMPUTATION --- #
# Reload the saved bootstrap results so the downstream summaries do not
# depend on the (optional, lengthy) recomputation above.
results_table = file_path %>%
  read.csv()
# --- SHOW ITERATED RESULTS - P VALUE DISTRIBUTIONS --- #
# One histogram of the bootstrapped p values per contrast. Deriving each
# title from the contrast label keeps them consistent (the previous fourth
# title said "vs" where the others used "-").
for (contrast_selected in c("high MM_conn - MM_unc",
                            "high SL_conn - SL_unc",
                            "low MM_conn - MM_unc",
                            "low SL_conn - SL_unc")) {
  results_table %>%
    filter(contrast == contrast_selected) %>%
    pull(p.value) %>%
    hist(main = paste("p values", contrast_selected))
}
# --- SHOW ITERATED RESULTS - TABLE WITH AVERAGED VALUES --- #
# Summarise the bootstrap distribution of each contrast by its median and
# attach a qualitative evidence label based on the median p value.
results_table %>%
  group_by(contrast) %>%
  summarise(estimate = median(estimate),
            SE = median(SE),
            df = median(df),
            t.ratio = median(t.ratio),
            p.value = median(p.value)) %>%
  as.data.frame() %>%
  mutate(estimate = round(estimate, digits = 3),
         SE = round(SE, digits = 3),
         df = round(df, digits = 3),
         t.ratio = round(t.ratio, digits = 3),
         p.value = round(p.value, digits = 3),
         # case_when evaluates top-down, so the strongest evidence label
         # wins. Unlike the previous overwriting ifelse() chain, a p value
         # of exactly 0.1 now gets "none" rather than an empty label.
         evidence = case_when(p.value < 0.001 ~ "**** very strong",
                              p.value < 0.01 ~ "*** strong",
                              p.value < 0.05 ~ "** moderate",
                              p.value < 0.1 ~ "* weak",
                              TRUE ~ "none"),
         # Report very small p values as a bound instead of a rounded 0
         p.value = ifelse(p.value < 0.001,
                          "< 0.001",
                          p.value))
## contrast estimate SE df t.ratio p.value evidence
## 1 high MM_conn - MM_unc 0.367 0.442 39.171 0.822 0.416 none
## 2 high SL_conn - SL_unc -0.339 0.353 38.411 -0.950 0.348 none
## 3 low MM_conn - MM_unc 0.967 0.445 39.581 2.173 0.036 ** moderate
## 4 low SL_conn - SL_unc 0.033 0.333 38.467 0.097 0.872 none
response_variable_selected = "total_metaecosystem_bioarea_mm2"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Bold label in the top-right corner of a disturbance panel
add_disturbance_label = function(label_text) {
  annotate("text",
           x = Inf,
           y = Inf,
           label = label_text,
           hjust = 1.1,
           vjust = 1.1,
           size = 5,
           fontface = "bold")
}
# Top panel: high disturbance (x axis suppressed; shown by bottom panel)
p1 = plot.metaecos.points(ds_metaecosystems %>% filter(disturbance == "high"),
                          metaecosystem_type_selected,
                          response_variable_selected) +
  add_disturbance_label("High disturbance") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
# Bottom panel: low disturbance
p2 = plot.metaecos.points(ds_metaecosystems %>% filter(disturbance == "low"),
                          metaecosystem_type_selected,
                          response_variable_selected) +
  theme(legend.position = "none") +
  add_disturbance_label("Low disturbance")
# Stack the two panels vertically with a shared legend
ggarrange(p1,
          p2,
          ncol = 1,
          nrow = 2,
          heights = c(0.9, 1),
          common.legend = TRUE,
          align = "v")
# --- ORIGINAL DATA - PLOT SINGLE REPLICATES --- #
# Bold label in the top-right corner of a disturbance panel
add_disturbance_label = function(label_text) {
  annotate("text",
           x = Inf,
           y = Inf,
           label = label_text,
           hjust = 1.1,
           vjust = 1.1,
           size = 5,
           fontface = "bold")
}
# Top panel: high disturbance replicates (x axis drawn by bottom panel only)
p1 = plot.metaecos.replicates(ds_metaecosystems %>% filter(disturbance == "high"),
                              metaecosystem_type_selected,
                              response_variable_selected) +
  add_disturbance_label("High disturbance") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
# Bottom panel: low disturbance replicates
p2 = plot.metaecos.replicates(ds_metaecosystems %>% filter(disturbance == "low"),
                              metaecosystem_type_selected,
                              response_variable_selected) +
  theme(legend.position = "none") +
  add_disturbance_label("Low disturbance")
# Stack the two panels vertically with a shared legend
ggarrange(p1,
          p2,
          ncol = 1,
          nrow = 2,
          heights = c(0.9, 1),
          common.legend = TRUE,
          align = "v")
# --- PREPARE DATA --- #
# Add baselines: each system's response value at the baseline time point,
# joined back onto every observation of that system.
baselines = ds_metaecosystems %>%
  filter(time_point == time_point_of_baselines) %>%
  select(system_nr,
         all_of(response_variable_selected)) %>%
  rename(baseline = all_of(response_variable_selected))
data_for_analysis = ds_metaecosystems %>%
  left_join(baselines,
            by = "system_nr")  # explicit key: implicit natural joins can silently pick up extra columns
# Filter to the modelled time points / meta-ecosystem types and shorten
# the type labels for readability in model outputs
data_for_analysis = data_for_analysis %>%
  filter(time_point %in% time_points_model,
         metaecosystem_type %in% metaecosystem_type_selected,
         #!is.na(total_water_addition_ml),
         !is.na(!!sym(response_variable_selected))) %>%
  mutate(metaecosystem_type = case_when(metaecosystem_type == "Small-Large unconnected" ~ "SL unc",
                                        metaecosystem_type == "Medium-Medium unconnected" ~ "MM unc",
                                        metaecosystem_type == "Small-Small meta-ecosystem" ~ "SS con",
                                        metaecosystem_type == "Medium-Medium meta-ecosystem" ~ "MM con",
                                        metaecosystem_type == "Large-Large meta-ecosystem" ~ "LL con",
                                        metaecosystem_type == "Small-Large meta-ecosystem" ~ "SL con",
                                        TRUE ~ metaecosystem_type),
         # Combined factor used as the fixed effect in the model formula
         type_conn = paste(metaecosystem_type, connection))
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI OF FILTERED DATA --- #
p1=plot.metaecos.points(data_for_analysis %>% filter(disturbance == "high"),
metaecosystem_type_selected,
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
p2=plot.metaecos.points(data_for_analysis %>% filter(disturbance == "low"),
metaecosystem_type_selected,
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- DEFINE MODEL FORMULA --- #
# Fixed effects: type/connection x disturbance x day; random slopes of day
# within baseline and within system. The response is looked up dynamically
# with get() so the same formula string works for any selected response.
formula = paste(
  "get(response_variable_selected) ~ type_conn * disturbance * day +",
  "(day | baseline) + (day | system_nr)")
print(formula)
## [1] "get(response_variable_selected) ~ type_conn * disturbance * day + (day | baseline) + (day | system_nr)"
# --- FIND ECOSYSTEM COMBINATIONS FOR ANALYSIS --- #
# Set up parameters
bootstrap_iterations = 1000
# Draw bootstrap rows uniformly with replacement. sample() replaces the
# previous runif() %>% round() draw, which gave the first and last rows
# only half the selection probability of interior rows; the previous
# filter(row_number() %in% random_sets_of_sets) additionally dropped
# duplicated draws, so fewer than bootstrap_iterations rows were kept even
# though the fitting loop below indexes rows 1:bootstrap_iterations
# (yielding NA rows for the trailing iterations).
random_sets_of_sets <- sample(nrow(ecos_combin_unconn_sets_of_sets),
                              size = bootstrap_iterations,
                              replace = TRUE)
# Keep exactly one row per draw, duplicates preserved, in draw order
sets_of_sets_filtered = ecos_combin_unconn_sets_of_sets %>%
  slice(random_sets_of_sets)
# --- DEFINE WHERE TO SAVE (AND FROM WHERE TO LATER READ) THE RESULTS OF THE ANALYSIS --- #
# The file name encodes the response variable and the iteration count so
# different bootstrap runs do not overwrite each other
results_file_name = paste0("results_metaeco_",
                           response_variable_selected,
                           "_",
                           bootstrap_iterations,
                           "_iterations",
                           ".csv")
file_path = here("3_results", "tables", results_file_name)
# --- FIND MODEL STATISTICS THROUGH MULTIPLE ITERATIONS RESHUFFLING ECOSYSTEM COMBINATIONS --- #
# For each bootstrap iteration: select one set of unconnected ecosystems
# per (meta-ecosystem type x disturbance) cell, refit the mixed model on
# those systems plus all connected systems, and store the four
# connected-vs-unconnected contrasts. Iterations whose model cannot be
# fitted by any optimizer are counted and skipped.
# Set parameters and initialise
results_table = list()
failed_optimizers = 0
# Fit models
for (comb_i in 1:bootstrap_iterations) {
# Find system numbers
# The filter below relies on & binding tighter than | in R: each of the
# four |-separated clauses selects the sampled set for one
# (metaecosystem_type, disturbance) cell of this iteration's row.
system_nr_unconn_selected = ecos_combin_unconn_sets %>%
filter(metaecosystem_type == "Small-Large" &
disturbance == "low" &
set == sets_of_sets_filtered[comb_i, ]$set_SL_low |
metaecosystem_type == "Small-Large" &
disturbance == "high" &
set == sets_of_sets_filtered[comb_i, ]$set_SL_high |
metaecosystem_type == "Medium-Medium" &
disturbance == "low" &
set == sets_of_sets_filtered[comb_i, ]$set_MM_low |
metaecosystem_type == "Medium-Medium" &
disturbance == "high" &
set == sets_of_sets_filtered[comb_i, ]$set_MM_high) %>%
pull(system_nr)
# Prepare data for analysis
# Keep the sampled unconnected systems plus every connected system
data_for_analysis_comb_i = data_for_analysis %>%
filter(system_nr %in% system_nr_unconn_selected | connection == "connected")
# Run model
# Helper defined elsewhere in this project: tries several lme4 optimizers
# in turn and returns NULL when none converges
model = try.different.optimizers.metaecos(data_for_analysis_comb_i,
formula)
# If all the optimisers fail, move on to the next iteration
if (is.null(model)) {
cat("This model could not be fitted with any optimiser (",
system_nr_unconn_selected,
") \n")
failed_optimizers = failed_optimizers + 1
next
}
# Residuals - show Q-Q plot
# print(qqnorm(resid(model))); print(qqline(resid(model)))
# Residuals - save Residuals vs Fitted plot
# plot = data_for_analysis_comb_i %>%
# mutate(predicted = fitted(model),
# residuals = resid(model)) %>%
# ggplot(aes(x = predicted,
# y = residuals)) +
# geom_point()
#
# ggsave(here("3_results",
# "residual_plots",
# paste0("res_vs_fit_",
# response_variable_selected,
# "_combination_",
# comb_i,
# ".png")),
# plot = plot,
# width = 8,
# height = 6)
# Set up contrasts
# Estimated marginal means over the type_conn x disturbance grid
# (day held at its average), with Satterthwaite degrees of freedom
emmeans_output = emmeans(model,
specs = ~ type_conn * day * disturbance ,
method = "pairwise",
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
# NOTE(review): the summary() result below is discarded (no print(), so
# nothing is shown inside the loop) — possibly leftover from debugging
summary(emmeans_output)
# Positional contrast weights over the 8-row emmeans grid.
# NOTE(review): positions assume type_conn varies fastest in the grid
# (MM con, MM unc, SL con, SL unc within high, then within low) — confirm
# against summary(emmeans_output) if levels or factor order change.
zeros = rep(0, 8)
high_MM_conn = zeros; high_MM_conn[1] = 1
high_MM_unc = zeros; high_MM_unc[2] = 1
high_SL_conn = zeros; high_SL_conn[3] = 1
high_SL_unc = zeros; high_SL_unc[4] = 1
low_MM_conn = zeros; low_MM_conn[5] = 1
low_MM_unc = zeros; low_MM_unc[6] = 1
low_SL_conn = zeros; low_SL_conn[7] = 1
low_SL_unc = zeros; low_SL_unc[8] = 1
# Save contrasts in a table
# Each iteration stores the four connected-minus-unconnected contrasts,
# tagged with the iteration number and the sampled system numbers
results_table[[comb_i]] = contrast(
emmeans_output,
method = list("high SL_conn - SL_unc" = high_SL_conn - high_SL_unc,
"high MM_conn - MM_unc" = high_MM_conn - high_MM_unc,
"low SL_conn - SL_unc" = low_SL_conn - low_SL_unc,
"low MM_conn - MM_unc" = low_MM_conn - low_MM_unc)) %>%
as.data.frame() %>%
mutate(combination = comb_i,
system_nr_unconnected_systems = paste(system_nr_unconn_selected, collapse = ", "))
}
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1012 1016 1025 1027 1033 1039 1041 1048 1054 1056 1063 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1013 1017 1025 1026 1035 1037 1043 1049 1050 1056 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1013 1017 1025 1026 1033 1040 1042 1049 1053 1056 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1006 1014 1018 1022 1027 1035 1038 1041 1050 1055 1056 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1010 1014 1016 1022 1027 1035 1038 1041 1055 1050 1056 1064 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1014 1018 1022 1028 1031 1040 1042 1048 1050 1056 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1014 1018 1021 1026 1032 1040 1043 1051 1054 1056 1065 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1014 1020 1023 1026 1032 1040 1043 1051 1054 1056 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1006 1013 1017 1024 1030 1031 1037 1043 1053 1046 1056 1065 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1014 1018 1022 1026 1032 1038 1045 1053 1049 1056 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1014 1020 1023 1026 1033 1037 1045 1055 1047 1056 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1014 1018 1022 1030 1031 1038 1042 1049 1050 1057 1061 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1013 1020 1021 1026 1034 1038 1042 1053 1049 1057 1061 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1013 1020 1021 1027 1034 1036 1043 1053 1052 1057 1061 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1015 1016 1023 1026 1035 1038 1042 1048 1052 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1012 1018 1021 1028 1032 1039 1045 1050 1048 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1013 1016 1022 1028 1034 1040 1042 1051 1047 1057 1062 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1013 1016 1024 1028 1032 1036 1045 1051 1047 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1012 1018 1024 1027 1034 1038 1041 1054 1048 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1014 1018 1022 1026 1035 1038 1042 1055 1050 1057 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1015 1016 1023 1027 1033 1039 1041 1047 1051 1057 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1015 1018 1021 1029 1033 1036 1042 1050 1048 1057 1065 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1010 1013 1016 1024 1028 1031 1037 1045 1053 1046 1057 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1014 1016 1022 1026 1033 1040 1042 1048 1054 1058 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1013 1020 1022 1027 1031 1038 1044 1051 1049 1058 1060 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1014 1017 1023 1030 1031 1037 1043 1051 1054 1058 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1012 1016 1023 1026 1035 1038 1042 1052 1048 1058 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1014 1016 1025 1028 1031 1040 1042 1049 1053 1058 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1014 1016 1025 1026 1032 1038 1045 1050 1055 1058 1062 )
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1013 1016 1022 1029 1032 1038 1041 1054 1046 1058 1062 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1008 1012 1020 1024 1026 1032 1038 1044 1054 1051 1058 1062 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1013 1016 1024 1027 1031 1038 1045 1054 1051 1058 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1010 1012 1016 1024 1027 1031 1040 1043 1047 1051 1058 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1014 1016 1023 1030 1033 1036 1042 1052 1053 1058 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1015 1016 1022 1026 1035 1037 1043 1053 1052 1058 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1011 1017 1025 1026 1032 1038 1045 1046 1053 1059 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1006 1014 1018 1022 1029 1033 1036 1042 1047 1051 1059 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1012 1016 1023 1027 1033 1039 1041 1054 1051 1059 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1013 1019 1021 1028 1034 1037 1041 1055 1047 1059 1060 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1011 1020 1024 1029 1033 1037 1045 1055 1050 1059 1060 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1007 1015 1018 1021 1028 1032 1040 1041 1055 1050 1059 1060 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1012 1018 1024 1027 1035 1036 1043 1047 1055 1059 1061 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1014 1020 1022 1028 1031 1037 1045 1053 1046 1059 1061 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1014 1020 1023 1028 1031 1039 1042 1054 1051 1059 1061 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1013 1016 1022 1028 1031 1037 1045 1046 1053 1059 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1011 1017 1025 1028 1031 1037 1044 1047 1052 1059 1063 )
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1011 1020 1023 1029 1032 1038 1041 1048 1054 1059 1063 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1010 1012 1016 1024 1026 1032 1040 1043 1050 1055 1059 1063 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1010 1013 1016 1024 1028 1031 1040 1042 1051 1049 1059 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1011 1018 1022 1027 1033 1036 1045 1051 1054 1059 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1012 1018 1025 1030 1033 1036 1042 1052 1053 1059 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1012 1018 1025 1026 1035 1037 1043 1054 1051 1059 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1015 1016 1022 1026 1033 1040 1042 1047 1051 1060 1058 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1011 1018 1022 1028 1034 1037 1041 1051 1049 1060 1058 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1011 1020 1022 1030 1032 1036 1043 1053 1046 1060 1058 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1013 1016 1025 1027 1033 1040 1041 1054 1051 1060 1058 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1015 1016 1024 1030 1032 1036 1043 1051 1047 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1015 1018 1021 1030 1032 1036 1043 1051 1049 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1015 1017 1024 1028 1031 1037 1045 1051 1054 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1012 1018 1025 1027 1031 1040 1043 1051 1054 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1014 1020 1023 1027 1033 1036 1045 1055 1047 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1015 1017 1024 1027 1034 1036 1043 1055 1047 1060 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1015 1016 1024 1028 1034 1036 1042 1046 1054 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1011 1017 1023 1028 1032 1036 1044 1047 1051 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1015 1016 1024 1029 1032 1038 1041 1050 1049 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1011 1017 1023 1027 1033 1036 1045 1051 1047 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1012 1019 1025 1028 1035 1037 1041 1051 1054 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1015 1018 1024 1027 1035 1038 1041 1052 1053 1060 1065 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1015 1018 1024 1026 1033 1037 1044 1053 1046 1060 1065 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1015 1018 1022 1027 1033 1036 1044 1046 1053 1061 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1012 1020 1024 1028 1032 1039 1041 1047 1051 1061 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1015 1018 1024 1028 1035 1039 1042 1051 1047 1061 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1011 1017 1023 1026 1032 1040 1043 1051 1049 1061 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1014 1018 1025 1030 1031 1037 1043 1051 1054 1061 1057 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1013 1017 1024 1027 1031 1039 1043 1051 1054 1061 1057 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1013 1020 1024 1030 1032 1036 1043 1051 1054 1061 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1013 1017 1021 1029 1031 1037 1043 1053 1049 1061 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1012 1018 1025 1026 1032 1038 1044 1054 1046 1061 1057 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1011 1018 1024 1028 1035 1036 1042 1053 1052 1061 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1012 1018 1025 1028 1032 1036 1045 1048 1052 1061 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1011 1018 1024 1030 1033 1037 1041 1050 1048 1061 1064 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1015 1018 1022 1028 1034 1040 1042 1047 1055 1062 1057 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1014 1018 1025 1028 1034 1037 1041 1047 1055 1062 1057 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1014 1018 1025 1030 1033 1037 1041 1049 1053 1062 1057 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1008 1012 1016 1024 1030 1033 1036 1042 1051 1054 1062 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1014 1020 1021 1026 1032 1039 1043 1053 1052 1062 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1015 1019 1022 1030 1032 1036 1043 1053 1052 1062 1057 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1006 1012 1018 1025 1028 1031 1040 1042 1054 1046 1062 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1008 1015 1017 1024 1027 1031 1038 1045 1055 1050 1062 1057 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1013 1020 1021 1026 1032 1038 1045 1046 1054 1062 1058 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1011 1017 1025 1027 1031 1038 1045 1047 1052 1062 1058 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1015 1016 1024 1030 1032 1038 1041 1048 1050 1062 1058 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1014 1018 1021 1028 1031 1039 1042 1050 1055 1062 1058 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1014 1017 1025 1030 1031 1037 1043 1055 1047 1062 1058 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1013 1017 1024 1028 1032 1040 1041 1049 1051 1062 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1011 1020 1024 1030 1032 1038 1041 1054 1051 1062 1063 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1006 1013 1020 1022 1028 1031 1039 1042 1047 1051 1063 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1013 1016 1022 1028 1032 1040 1041 1048 1054 1063 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1015 1016 1024 1027 1031 1038 1044 1048 1054 1063 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1013 1019 1025 1027 1033 1036 1045 1051 1054 1063 1059 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1013 1020 1021 1028 1035 1037 1041 1052 1047 1063 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1015 1017 1021 1028 1031 1037 1044 1052 1053 1063 1059 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1012 1020 1023 1028 1035 1037 1041 1046 1054 1063 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1015 1017 1023 1030 1031 1038 1042 1047 1051 1063 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1011 1018 1025 1029 1032 1038 1041 1053 1046 1063 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1006 1014 1018 1022 1029 1032 1036 1043 1054 1048 1063 1062 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1013 1016 1025 1026 1032 1039 1043 1046 1055 1064 1056 )
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1014 1018 1021 1029 1032 1038 1045 1047 1055 1064 1056 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1014 1018 1021 1030 1032 1036 1043 1052 1048 1064 1056 )
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1011 1018 1024 1028 1031 1037 1044 1048 1050 1064 1058 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1012 1018 1021 1030 1032 1038 1044 1051 1047 1064 1058 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1015 1016 1024 1026 1033 1037 1044 1047 1051 1064 1061 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1012 1016 1023 1030 1033 1037 1041 1049 1050 1064 1061 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1014 1017 1025 1026 1032 1038 1045 1051 1047 1064 1061 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1011 1018 1025 1029 1033 1036 1042 1052 1047 1064 1061 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1006 1012 1020 1024 1027 1035 1038 1041 1053 1052 1064 1061 )
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1007 1015 1018 1024 1028 1032 1036 1045 1047 1052 1065 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1004 1007 1011 1020 1023 1026 1033 1037 1045 1051 1054 1065 1056 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1013 1017 1021 1026 1032 1040 1043 1052 1047 1065 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1009 1011 1017 1025 1030 1033 1036 1042 1052 1047 1065 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1015 1018 1022 1028 1032 1036 1045 1052 1053 1065 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1015 1018 1022 1026 1032 1038 1045 1054 1051 1065 1056 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1003 1007 1014 1016 1025 1026 1033 1037 1045 1049 1050 1065 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1014 1016 1023 1028 1035 1037 1041 1050 1048 1065 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1011 1020 1023 1030 1031 1038 1042 1050 1055 1065 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1015 1019 1021 1030 1033 1036 1042 1051 1054 1065 1057 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1007 1014 1018 1021 1029 1032 1038 1041 1051 1054 1065 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1014 1018 1025 1026 1033 1040 1042 1053 1049 1065 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1008 1011 1020 1024 1026 1033 1040 1042 1055 1050 1065 1057 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1010 1013 1017 1024 1027 1035 1038 1041 1046 1054 1065 1060 )
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: Nelder_Mead "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1009 1015 1018 1022 1027 1035 1038 1041 1051 1047 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nlminbwrap "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1015 1018 1021 1030 1032 1036 1043 1053 1046 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1006 1015 1018 1024 1030 1033 1036 1042 1053 1049 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: nloptwrap (NLOPT_LN_NELDERMEAD)"
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1002 1009 1011 1020 1023 1030 1032 1038 1044 1054 1048 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1001 1008 1014 1020 1022 1029 1033 1036 1042 1054 1051 1065 1060 )
## [1] "Model fitting failed with all optimizers."
## This model could not be fitted with any optimiser ( 1005 1009 1013 1017 1021 1028 1032 1040 1044 1054 1051 1065 1060 )
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: bobyqa "
## [1] "Model successfully fitted with optimizer: optimx (L-BFGS-B)"
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- REPORT OPTIMIZER FAILURES AND PERSIST BOOTSTRAP RESULTS --- #
# Report the share of bootstrap iterations whose model could not be fitted
# by any optimizer (see the fitting log printed above)
print(paste((failed_optimizers/bootstrap_iterations * 100),
"% of combinations created models that could not be fit"))
## [1] "13.6 % of combinations created models that could not be fit"
# Collapse the list of per-iteration result data frames into one data frame
results_table = results_table %>%
bind_rows()
# Save the results so the lengthy computation can be skipped on re-runs;
# use FALSE rather than F (F is an ordinary variable and can be reassigned)
write.csv(results_table,
file = file_path,
row.names = FALSE)
# --- READ THE RESULTS AFTER LENGTHY COMPUTATION --- #
results_table = read.csv(file = file_path)
# --- SHOW ITERATED RESULTS - P VALUE DISTRIBUTIONS --- #
# One histogram of bootstrapped p values per contrast. Titles are derived
# from the contrast name so all four panels are labelled consistently (the
# original fourth title used "vs" where the other three used "-").
for (selected_contrast in c("high MM_conn - MM_unc",
"high SL_conn - SL_unc",
"low MM_conn - MM_unc",
"low SL_conn - SL_unc")) {
results_table %>%
filter(contrast == selected_contrast) %>%
pull(p.value) %>%
hist(main = paste("p values", selected_contrast))
}
# --- SHOW ITERATED RESULTS - TABLE WITH AVERAGED VALUES --- #
# Summarise the bootstrap distribution of each contrast by its median and
# attach a verbal evidence category. case_when() replaces the original
# chain of ifelse() calls, which left p.value == 0.1 uncategorised
# (neither "> 0.1" nor "< 0.1" was true, so evidence stayed "").
results_table %>%
group_by(contrast) %>%
summarise(estimate = median(estimate),
SE = median(SE),
df = median(df),
t.ratio = median(t.ratio),
p.value = median(p.value)) %>%
as.data.frame() %>%
mutate(estimate = round(estimate, digits = 3),
SE = round(SE, digits = 3),
df = round(df, digits = 3),
t.ratio = round(t.ratio, digits = 3),
p.value = round(p.value, digits = 3),
# First matching condition wins; p.value == 0.1 now falls into "none"
evidence = case_when(p.value < 0.001 ~ "**** very strong",
p.value < 0.01 ~ "*** strong",
p.value < 0.05 ~ "** moderate",
p.value < 0.1 ~ "* weak",
TRUE ~ "none"),
# Display very small p values as a bound rather than a rounded 0
p.value = ifelse(p.value < 0.001,
"< 0.001",
p.value))
## contrast estimate SE df t.ratio p.value evidence
## 1 high MM_conn - MM_unc 17.326 13.109 33.314 1.325 0.194 none
## 2 high SL_conn - SL_unc -32.905 10.510 33.314 -3.106 0.004 *** strong
## 3 low MM_conn - MM_unc 38.759 13.109 33.314 2.945 0.006 *** strong
## 4 low SL_conn - SL_unc -21.448 9.909 33.314 -2.164 0.038 ** moderate
# Response variable analysed in this section: Shannon diversity
response_variable_selected = "shannon"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Top panel: high-disturbance ecosystems over time
p1 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
# NOTE(review): the NaNs above presumably come from groups with N = 1,
# where a t-based confidence interval is undefined — verify against the data
# Bottom panel: low-disturbance ecosystems (legend suppressed, shared with p1)
p2 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- PREPARE DATA FOR ANALYSIS --- #
# Add baselines: per culture, the response value at the baseline time
# point, later used as a random-effect grouping term in the model.
baselines = ds_ecosystems %>%
filter(time_point == time_point_of_baselines) %>%
select(culture_ID,
all_of(response_variable_selected)) %>%
rename(baseline = all_of(response_variable_selected))
# Join by an explicit key instead of letting left_join() guess the shared
# columns (safer if ds_ecosystems ever gains a colliding column name)
data_for_analysis = ds_ecosystems %>%
left_join(baselines,
by = "culture_ID")
# Filter data and change level names
data_for_analysis = data_for_analysis %>%
filter(time_point %in% time_points_model,
#!is.na(water_addition_ml),
!is.na(!!sym(response_variable_selected)),
!is.infinite(!!sym(response_variable_selected))) %>%
mutate(ecosystem_type = case_when(ecosystem_type == "Small unconnected" ~ "S",
ecosystem_type == "Medium unconnected" ~ "M",
ecosystem_type == "Large unconnected" ~ "L",
ecosystem_type == "Small connected to small" ~ "S_S",
ecosystem_type == "Small connected to large" ~ "S_L",
ecosystem_type == "Medium connected to medium" ~ "M_M",
ecosystem_type == "Large connected to large" ~ "L_L",
ecosystem_type == "Large connected to small" ~ "L_S",
TRUE ~ ecosystem_type)) %>%
ungroup()
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI --- #
# Re-plot the filtered/recoded analysis data to confirm the preparation
# step did not distort the patterns seen in the raw data above
p1 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
# NOTE(review): NaNs presumably arise where a group has N = 1 — verify
p2 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- MODEL - CONSTRUCT --- #
# Define formula
# Fixed effects: full three-way interaction of ecosystem type, disturbance
# and day; random day slopes within culture and within baseline value.
# NOTE(review): the name `formula` shadows base::formula — harmless here,
# but worth renaming.
formula = paste("get(response_variable_selected) ~",
"ecosystem_type * disturbance * day +",
"(day | culture_ID) +",
"(day | baseline)") %>%
print()
## [1] "get(response_variable_selected) ~ ecosystem_type * disturbance * day + (day | culture_ID) + (day | baseline)"
# Construct model
# try.different.optimizers.ecos() (defined earlier in the file) fits the
# mixed model, falling back through optimizers until one converges
model = try.different.optimizers.ecos(data_for_analysis,
formula)
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- MODEL - SUMMARY --- #
print(summary(model), digits = 1)
## Linear mixed model fit by maximum likelihood . t-tests use Satterthwaite's
## method [lmerModLmerTest]
## Formula: formula
## Data: data
## Control: lmerControl(optimizer = opt$optimizer)
##
## AIC BIC logLik deviance df.resid
## 352.4 526.4 -137.2 274.4 602
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -3.5 -0.6 0.0 0.7 3.7
##
## Random effects:
## Groups Name Variance Std.Dev. Corr
## culture_ID (Intercept) 0e+00 0e+00
## day 1e-18 1e-09 NaN
## baseline (Intercept) 4e-11 7e-06
## day 2e-13 5e-07 -1.00
## Residual 9e-02 3e-01
## Number of obs: 641, groups: culture_ID, 109; baseline, 109
##
## Fixed effects:
## Estimate Std. Error df t value
## (Intercept) 1e+00 2e-01 6e+02 9.6
## ecosystem_typeL_L -6e-02 2e-01 6e+02 -0.3
## ecosystem_typeL_S -3e-01 2e-01 6e+02 -1.2
## ecosystem_typeM -3e-01 2e-01 6e+02 -1.3
## ecosystem_typeM_M -6e-02 2e-01 6e+02 -0.3
## ecosystem_typeS 1e-01 2e-01 6e+02 0.6
## ecosystem_typeS_L 5e-01 2e-01 6e+02 2.5
## ecosystem_typeS_S 3e-01 2e-01 6e+02 1.7
## disturbancelow 4e-01 2e-01 6e+02 1.6
## day -7e-04 8e-03 6e+02 -0.1
## ecosystem_typeL_L:disturbancelow -3e-01 3e-01 6e+02 -1.0
## ecosystem_typeL_S:disturbancelow -3e-01 3e-01 6e+02 -0.9
## ecosystem_typeM:disturbancelow -4e-01 3e-01 6e+02 -1.2
## ecosystem_typeM_M:disturbancelow -4e-01 3e-01 6e+02 -1.5
## ecosystem_typeS:disturbancelow -2e-01 3e-01 6e+02 -0.6
## ecosystem_typeS_L:disturbancelow -1e+00 3e-01 6e+02 -3.2
## ecosystem_typeS_S:disturbancelow -4e-01 3e-01 6e+02 -1.6
## ecosystem_typeL_L:day 2e-03 1e-02 6e+02 0.2
## ecosystem_typeL_S:day 8e-03 1e-02 6e+02 0.7
## ecosystem_typeM:day 3e-03 1e-02 6e+02 0.3
## ecosystem_typeM_M:day -2e-03 1e-02 6e+02 -0.2
## ecosystem_typeS:day -7e-02 1e-02 6e+02 -5.1
## ecosystem_typeS_L:day -6e-02 1e-02 6e+02 -5.7
## ecosystem_typeS_S:day -7e-02 1e-02 6e+02 -6.5
## disturbancelow:day -2e-02 1e-02 6e+02 -1.4
## ecosystem_typeL_L:disturbancelow:day 1e-02 1e-02 6e+02 0.8
## ecosystem_typeL_S:disturbancelow:day 2e-02 2e-02 6e+02 1.0
## ecosystem_typeM:disturbancelow:day 2e-02 2e-02 6e+02 1.4
## ecosystem_typeM_M:disturbancelow:day 2e-02 1e-02 6e+02 1.3
## ecosystem_typeS:disturbancelow:day 4e-02 2e-02 6e+02 2.3
## ecosystem_typeS_L:disturbancelow:day 8e-02 2e-02 6e+02 5.0
## ecosystem_typeS_S:disturbancelow:day 4e-02 1e-02 6e+02 2.8
## Pr(>|t|)
## (Intercept) <2e-16 ***
## ecosystem_typeL_L 0.732
## ecosystem_typeL_S 0.240
## ecosystem_typeM 0.205
## ecosystem_typeM_M 0.741
## ecosystem_typeS 0.579
## ecosystem_typeS_L 0.014 *
## ecosystem_typeS_S 0.086 .
## disturbancelow 0.109
## day 0.930
## ecosystem_typeL_L:disturbancelow 0.318
## ecosystem_typeL_S:disturbancelow 0.361
## ecosystem_typeM:disturbancelow 0.228
## ecosystem_typeM_M:disturbancelow 0.128
## ecosystem_typeS:disturbancelow 0.555
## ecosystem_typeS_L:disturbancelow 0.002 **
## ecosystem_typeS_S:disturbancelow 0.119
## ecosystem_typeL_L:day 0.810
## ecosystem_typeL_S:day 0.485
## ecosystem_typeM:day 0.781
## ecosystem_typeM_M:day 0.836
## ecosystem_typeS:day 4e-07 ***
## ecosystem_typeS_L:day 2e-08 ***
## ecosystem_typeS_S:day 2e-10 ***
## disturbancelow:day 0.149
## ecosystem_typeL_L:disturbancelow:day 0.429
## ecosystem_typeL_S:disturbancelow:day 0.295
## ecosystem_typeM:disturbancelow:day 0.155
## ecosystem_typeM_M:disturbancelow:day 0.196
## ecosystem_typeS:disturbancelow:day 0.022 *
## ecosystem_typeS_L:disturbancelow:day 6e-07 ***
## ecosystem_typeS_S:disturbancelow:day 0.005 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# --- MODEL - ANOVA --- #
# Type III Wald chi-square tests for each fixed-effect term
car::Anova(model, type = "III")
## Analysis of Deviance Table (Type III Wald chisquare tests)
##
## Response: get(response_variable_selected)
## Chisq Df Pr(>Chisq)
## (Intercept) 92.1969 1 < 2.2e-16 ***
## ecosystem_type 26.7429 7 0.0003707 ***
## disturbance 2.5795 1 0.1082569
## day 0.0077 1 0.9302887
## ecosystem_type:disturbance 11.9786 7 0.1012636
## ecosystem_type:day 155.7995 7 < 2.2e-16 ***
## disturbance:day 2.0906 1 0.1482065
## ecosystem_type:disturbance:day 37.7566 7 3.371e-06 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# --- ESTIMATE MARGINAL MEANS --- #
# Marginal means per ecosystem type x disturbance cell, Satterthwaite
# degrees of freedom, Sidak-adjusted confidence levels.
# NOTE(review): in emmeans, bias.adj = TRUE normally also requires a
# `sigma` argument to take effect — confirm it does what is intended here.
emmeans_output = emmeans(model,
specs = ~ ecosystem_type * disturbance * day,
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
emmeans_output
## ecosystem_type disturbance day emmean SE df lower.CL upper.CL
## L high 17.8 1.468 0.0547 641 1.306 1.630
## L_L high 17.8 1.446 0.0387 641 1.331 1.560
## L_S high 17.8 1.353 0.0547 641 1.191 1.515
## M high 17.8 1.248 0.0547 641 1.086 1.410
## M_M high 17.8 1.370 0.0387 641 1.255 1.484
## S high 17.8 0.336 0.0736 641 0.118 0.554
## S_L high 17.8 0.854 0.0547 641 0.692 1.016
## S_S high 17.8 0.621 0.0412 641 0.499 0.743
## L low 17.8 1.526 0.0547 641 1.364 1.688
## L_L low 17.8 1.433 0.0387 641 1.318 1.547
## L_S low 17.8 1.429 0.0547 641 1.267 1.591
## M low 17.8 1.341 0.0547 641 1.179 1.503
## M_M low 17.8 1.341 0.0387 641 1.226 1.455
## S low 17.8 0.939 0.0557 641 0.774 1.104
## S_L low 17.8 1.373 0.0547 641 1.211 1.535
## S_S low 17.8 0.966 0.0387 641 0.852 1.081
##
## Degrees-of-freedom method: satterthwaite
## Results are given on the get (not the response) scale.
## Confidence level used: 0.95
## Conf-level adjustment: sidak method for 16 estimates
# --- CODE EACH LEVEL OF DISTURBANCE AND ECOSYSTEM TYPE TO THEN PRODUCE CONTRASTS --- #
# One indicator vector per disturbance x ecosystem-type cell, following the
# row order of emmeans_output above: a single 1 marks the cell, 0 elsewhere.
cell.indicator = function(position, n_cells = 16) {
indicator_vector = rep(0, n_cells)
indicator_vector[position] = 1
indicator_vector
}
high_L = cell.indicator(1)
high_L_L = cell.indicator(2)
high_L_S = cell.indicator(3)
high_M = cell.indicator(4)
high_M_M = cell.indicator(5)
high_S = cell.indicator(6)
high_S_L = cell.indicator(7)
high_S_S = cell.indicator(8)
low_L = cell.indicator(9)
low_L_L = cell.indicator(10)
low_L_S = cell.indicator(11)
low_M = cell.indicator(12)
low_M_M = cell.indicator(13)
low_S = cell.indicator(14)
low_S_L = cell.indicator(15)
low_S_S = cell.indicator(16)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Compare each larger unconnected ecosystem against the small one within
# each disturbance level, round every statistic, and attach significance
# stars in an unnamed trailing column.
contrasts = contrast(emmeans_output,
method = list("high L - S" = high_L - high_S,
"high M - S" = high_M - high_S,
"low L - S" = low_L - low_S,
"low M - S" = low_M - low_S)) %>%
as.data.frame() %>%
mutate(across(c(p.value, estimate, SE, df, t.ratio),
~ round(.x, digits = n_of_digits)),
e = case_when(p.value < 0.001 ~ "***",
p.value < 0.01 ~ "**",
p.value < 0.05 ~ "*",
TRUE ~ "")) %>%
rename(" " = e)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Compare each connected ecosystem against its unconnected counterpart, and
# the two partners of asymmetric pairs against each other, within each
# disturbance level; round statistics and attach significance stars.
contrasts = contrast(emmeans_output,
method = list("high S_L - S" = high_S_L - high_S,
"high S_L - S_S" = high_S_L - high_S_S,
"high S_S - S" = high_S_S - high_S,
"high M_M - M" = high_M_M - high_M,
"high L_S - L" = high_L_S - high_L,
"high L_S - L_L" = high_L_S - high_L_L,
"high L_L - L" = high_L_L - high_L,
"low S_L - S" = low_S_L - low_S,
"low S_L - S_S" = low_S_L - low_S_S,
"low S_S - S" = low_S_S - low_S,
"low M_M - M" = low_M_M - low_M,
"low L_S - L" = low_L_S - low_L,
"low L_S - L_L" = low_L_S - low_L_L,
"low L_L - L" = low_L_L - low_L)) %>%
as.data.frame() %>%
mutate(across(c(p.value, estimate, SE, df, t.ratio),
~ round(.x, digits = n_of_digits)),
e = case_when(p.value < 0.001 ~ "***",
p.value < 0.01 ~ "**",
p.value < 0.05 ~ "*",
TRUE ~ "")) %>%
rename(" " = e)
# --- SHOW ECOSYSTEM TYPE CONTRASTS --- #
contrasts
## contrast estimate SE df t.ratio p.value
## 1 high S_L - S 0.518 0.092 641 5.647 0.000 ***
## 2 high S_L - S_S 0.233 0.068 641 3.399 0.001 **
## 3 high S_S - S 0.285 0.084 641 3.382 0.001 **
## 4 high M_M - M 0.122 0.067 641 1.817 0.070
## 5 high L_S - L -0.115 0.077 641 -1.490 0.137
## 6 high L_S - L_L -0.093 0.067 641 -1.384 0.167
## 7 high L_L - L -0.023 0.067 641 -0.337 0.736
## 8 low S_L - S 0.434 0.078 641 5.562 0.000 ***
## 9 low S_L - S_S 0.407 0.067 641 6.071 0.000 ***
## 10 low S_S - S 0.027 0.068 641 0.402 0.688
## 11 low M_M - M -0.001 0.067 641 -0.009 0.993
## 12 low L_S - L -0.098 0.077 641 -1.261 0.208
## 13 low L_S - L_L -0.004 0.067 641 -0.060 0.952
## 14 low L_L - L -0.094 0.067 641 -1.395 0.163
# --- PLOT MODEL RESIDUALS --- #
# DHARMa residual diagnostics for the Shannon-diversity model
# Simulate residuals from the fitted model
simulationOutput <- simulateResiduals(fittedModel = model,
n = 1000)
# Test for overdispersion in the model to check if variance exceeds mean
testDispersion(simulationOutput)
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 1.0037, p-value = 0.948
## alternative hypothesis: two.sided
# Plot Q-Q and Residuals vs Fitted plots
plot(simulationOutput)
# Response variable analysed in this section: bioarea per ml (biomass proxy)
response_variable_selected = "bioarea_mm2_per_ml"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Top panel: high-disturbance ecosystems over time
p1 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
# Bottom panel: low-disturbance ecosystems (legend suppressed, shared with p1)
p2 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- PREPARE DATA FOR ANALYSIS --- #
# Add baselines: per culture, the response value at the baseline time
# point, later used as a random-effect grouping term in the model.
baselines = ds_ecosystems %>%
filter(time_point == time_point_of_baselines) %>%
select(culture_ID,
all_of(response_variable_selected)) %>%
rename(baseline = all_of(response_variable_selected))
# Join by an explicit key instead of letting left_join() guess the shared
# columns (safer if ds_ecosystems ever gains a colliding column name)
data_for_analysis = ds_ecosystems %>%
left_join(baselines,
by = "culture_ID")
# Filter data and change level names
data_for_analysis = data_for_analysis %>%
filter(time_point %in% time_points_model,
#!is.na(water_addition_ml),
!is.na(!!sym(response_variable_selected)),
!is.infinite(!!sym(response_variable_selected))) %>%
mutate(ecosystem_type = case_when(ecosystem_type == "Small unconnected" ~ "S",
ecosystem_type == "Medium unconnected" ~ "M",
ecosystem_type == "Large unconnected" ~ "L",
ecosystem_type == "Small connected to small" ~ "S_S",
ecosystem_type == "Small connected to large" ~ "S_L",
ecosystem_type == "Medium connected to medium" ~ "M_M",
ecosystem_type == "Large connected to large" ~ "L_L",
ecosystem_type == "Large connected to small" ~ "L_S",
TRUE ~ ecosystem_type)) %>%
ungroup()
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI --- #
# Re-plot the filtered/recoded analysis data to confirm the preparation
# step did not distort the patterns seen in the raw data above
p1 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
p2 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- MODEL - CONSTRUCT --- #
# Define formula
# Same mixed-model structure as for the other response variables:
# three-way fixed-effect interaction plus random day slopes per culture
# and per baseline value.
formula = paste("get(response_variable_selected) ~",
"ecosystem_type * disturbance * day +",
"(day | culture_ID) +",
"(day | baseline)") %>%
print()
## [1] "get(response_variable_selected) ~ ecosystem_type * disturbance * day + (day | culture_ID) + (day | baseline)"
# Construct model
model = try.different.optimizers.ecos(data_for_analysis,
formula)
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- MODEL - SUMMARY --- #
print(summary(model), digits = 1)
## Linear mixed model fit by maximum likelihood . t-tests use Satterthwaite's
## method [lmerModLmerTest]
## Formula: formula
## Data: data
## Control: lmerControl(optimizer = opt$optimizer)
##
## AIC BIC logLik deviance df.resid
## 1639.4 1814.3 -780.7 1561.4 615
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -3.1 -0.6 0.0 0.5 3.6
##
## Random effects:
## Groups Name Variance Std.Dev. Corr
## culture_ID (Intercept) 2e-01 0.39
## day 2e-04 0.01 -1.00
## baseline (Intercept) 1e-01 0.34
## day 2e-04 0.01 -1.00
## Residual 6e-01 0.77
## Number of obs: 654, groups: culture_ID, 109; baseline, 109
##
## Fixed effects:
## Estimate Std. Error df t value
## (Intercept) 5.94 0.46 122.57 12.9
## ecosystem_typeL_L -0.15 0.56 122.57 -0.3
## ecosystem_typeL_S -1.18 0.65 122.57 -1.8
## ecosystem_typeM -1.63 0.65 122.57 -2.5
## ecosystem_typeM_M -0.26 0.56 122.57 -0.5
## ecosystem_typeS -4.14 0.69 122.57 -6.0
## ecosystem_typeS_L -2.90 0.65 122.57 -4.5
## ecosystem_typeS_S -3.72 0.56 122.57 -6.6
## disturbancelow -0.99 0.65 122.57 -1.5
## day -0.16 0.02 167.67 -7.2
## ecosystem_typeL_L:disturbancelow 0.68 0.80 122.57 0.9
## ecosystem_typeL_S:disturbancelow 1.68 0.92 122.57 1.8
## ecosystem_typeM:disturbancelow 1.80 0.92 122.57 2.0
## ecosystem_typeM_M:disturbancelow 1.35 0.80 122.57 1.7
## ecosystem_typeS:disturbancelow 3.12 0.95 122.57 3.3
## ecosystem_typeS_L:disturbancelow 1.90 0.92 122.57 2.1
## ecosystem_typeS_S:disturbancelow 2.37 0.80 122.57 3.0
## ecosystem_typeL_L:day -0.01 0.03 167.67 -0.3
## ecosystem_typeL_S:day 0.01 0.03 167.67 0.4
## ecosystem_typeM:day 0.02 0.03 167.67 0.7
## ecosystem_typeM_M:day -0.03 0.03 167.67 -1.1
## ecosystem_typeS:day 0.09 0.03 167.67 2.5
## ecosystem_typeS_L:day 0.04 0.03 167.67 1.3
## ecosystem_typeS_S:day 0.07 0.03 167.67 2.7
## disturbancelow:day 0.07 0.03 167.67 2.2
## ecosystem_typeL_L:disturbancelow:day -0.05 0.04 167.67 -1.3
## ecosystem_typeL_S:disturbancelow:day -0.08 0.04 167.67 -1.7
## ecosystem_typeM:disturbancelow:day -0.10 0.04 167.67 -2.3
## ecosystem_typeM_M:disturbancelow:day -0.05 0.04 167.67 -1.4
## ecosystem_typeS:disturbancelow:day -0.15 0.05 167.67 -3.2
## ecosystem_typeS_L:disturbancelow:day -0.08 0.04 167.67 -1.8
## ecosystem_typeS_S:disturbancelow:day -0.12 0.04 167.67 -3.1
## Pr(>|t|)
## (Intercept) <2e-16 ***
## ecosystem_typeL_L 0.789
## ecosystem_typeL_S 0.073 .
## ecosystem_typeM 0.013 *
## ecosystem_typeM_M 0.643
## ecosystem_typeS 2e-08 ***
## ecosystem_typeS_L 2e-05 ***
## ecosystem_typeS_S 1e-09 ***
## disturbancelow 0.130
## day 2e-11 ***
## ecosystem_typeL_L:disturbancelow 0.395
## ecosystem_typeL_S:disturbancelow 0.070 .
## ecosystem_typeM:disturbancelow 0.052 .
## ecosystem_typeM_M:disturbancelow 0.093 .
## ecosystem_typeS:disturbancelow 0.001 **
## ecosystem_typeS_L:disturbancelow 0.041 *
## ecosystem_typeS_S:disturbancelow 0.004 **
## ecosystem_typeL_L:day 0.728
## ecosystem_typeL_S:day 0.695
## ecosystem_typeM:day 0.474
## ecosystem_typeM_M:day 0.258
## ecosystem_typeS:day 0.012 *
## ecosystem_typeS_L:day 0.203
## ecosystem_typeS_S:day 0.009 **
## disturbancelow:day 0.026 *
## ecosystem_typeL_L:disturbancelow:day 0.213
## ecosystem_typeL_S:disturbancelow:day 0.086 .
## ecosystem_typeM:disturbancelow:day 0.021 *
## ecosystem_typeM_M:disturbancelow:day 0.163
## ecosystem_typeS:disturbancelow:day 0.002 **
## ecosystem_typeS_L:disturbancelow:day 0.081 .
## ecosystem_typeS_S:disturbancelow:day 0.002 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# --- MODEL - ANOVA --- #
# Type III Wald chi-square tests for each fixed-effect term
car::Anova(model, type = "III")
## Analysis of Deviance Table (Type III Wald chisquare tests)
##
## Response: get(response_variable_selected)
## Chisq Df Pr(>Chisq)
## (Intercept) 166.6820 1 < 2.2e-16 ***
## ecosystem_type 124.6134 7 < 2.2e-16 ***
## disturbance 2.3252 1 0.12729
## day 51.2279 1 8.224e-13 ***
## ecosystem_type:disturbance 18.2609 7 0.01085 *
## ecosystem_type:day 33.5306 7 2.108e-05 ***
## disturbance:day 5.0218 1 0.02503 *
## ecosystem_type:disturbance:day 17.2032 7 0.01613 *
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# --- ESTIMATE MARGINAL MEANS --- #
# Marginal means per ecosystem type x disturbance cell, Satterthwaite
# degrees of freedom, Sidak-adjusted confidence levels.
# NOTE(review): in emmeans, bias.adj = TRUE normally also requires a
# `sigma` argument to take effect — confirm it does what is intended here.
emmeans_output = emmeans(model,
specs = ~ ecosystem_type * disturbance * day,
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
emmeans_output
## ecosystem_type disturbance day emmean SE df lower.CL upper.CL
## L high 18 3.061 0.161 127 2.576 3.547
## L_L high 18 2.739 0.114 127 2.396 3.082
## L_S high 18 2.108 0.161 127 1.623 2.593
## M high 18 1.838 0.161 127 1.352 2.323
## M_M high 18 2.241 0.114 127 1.898 2.584
## S high 18 0.448 0.181 127 -0.094 0.991
## S_L high 18 0.891 0.161 127 0.406 1.376
## S_S high 18 0.647 0.114 127 0.304 0.990
## L low 18 3.343 0.161 127 2.858 3.828
## L_L low 18 2.831 0.114 127 2.488 3.174
## L_S low 18 2.682 0.161 127 2.196 3.167
## M low 18 2.057 0.161 127 1.572 2.543
## M_M low 18 2.898 0.114 127 2.555 3.242
## S low 18 1.214 0.161 127 0.728 1.699
## S_L low 18 1.662 0.161 127 1.176 2.147
## S_S low 18 1.154 0.114 127 0.811 1.497
##
## Degrees-of-freedom method: satterthwaite
## Results are given on the get (not the response) scale.
## Confidence level used: 0.95
## Conf-level adjustment: sidak method for 16 estimates
# --- CODE EACH LEVEL OF DISTURBANCE AND ECOSYSTEM TYPE TO THEN PRODUCE CONTRASTS --- #
# One indicator vector per disturbance x ecosystem-type cell, following the
# row order of emmeans_output above: a single 1 marks the cell, 0 elsewhere.
cell.indicator = function(position, n_cells = 16) {
indicator_vector = rep(0, n_cells)
indicator_vector[position] = 1
indicator_vector
}
high_L = cell.indicator(1)
high_L_L = cell.indicator(2)
high_L_S = cell.indicator(3)
high_M = cell.indicator(4)
high_M_M = cell.indicator(5)
high_S = cell.indicator(6)
high_S_L = cell.indicator(7)
high_S_S = cell.indicator(8)
low_L = cell.indicator(9)
low_L_L = cell.indicator(10)
low_L_S = cell.indicator(11)
low_M = cell.indicator(12)
low_M_M = cell.indicator(13)
low_S = cell.indicator(14)
low_S_L = cell.indicator(15)
low_S_S = cell.indicator(16)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Compare each larger unconnected ecosystem against the small one within
# each disturbance level, round every statistic, and attach significance
# stars in an unnamed trailing column.
contrasts = contrast(emmeans_output,
method = list("high L - S" = high_L - high_S,
"high M - S" = high_M - high_S,
"low L - S" = low_L - low_S,
"low M - S" = low_M - low_S)) %>%
as.data.frame() %>%
mutate(across(c(p.value, estimate, SE, df, t.ratio),
~ round(.x, digits = n_of_digits)),
e = case_when(p.value < 0.001 ~ "***",
p.value < 0.01 ~ "**",
p.value < 0.05 ~ "*",
TRUE ~ "")) %>%
rename(" " = e)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Compare each connected ecosystem against its unconnected counterpart, and
# the two partners of asymmetric pairs against each other, within each
# disturbance level; round statistics and attach significance stars.
contrasts = contrast(emmeans_output,
method = list("high S_L - S" = high_S_L - high_S,
"high S_L - S_S" = high_S_L - high_S_S,
"high S_S - S" = high_S_S - high_S,
"high M_M - M" = high_M_M - high_M,
"high L_S - L" = high_L_S - high_L,
"high L_S - L_L" = high_L_S - high_L_L,
"high L_L - L" = high_L_L - high_L,
"low S_L - S" = low_S_L - low_S,
"low S_L - S_S" = low_S_L - low_S_S,
"low S_S - S" = low_S_S - low_S,
"low M_M - M" = low_M_M - low_M,
"low L_S - L" = low_L_S - low_L,
"low L_S - L_L" = low_L_S - low_L_L,
"low L_L - L" = low_L_L - low_L)) %>%
as.data.frame() %>%
mutate(across(c(p.value, estimate, SE, df, t.ratio),
~ round(.x, digits = n_of_digits)),
e = case_when(p.value < 0.001 ~ "***",
p.value < 0.01 ~ "**",
p.value < 0.05 ~ "*",
TRUE ~ "")) %>%
rename(" " = e)
# --- SHOW ECOSYSTEM TYPE CONTRASTS --- #
contrasts
## contrast estimate SE df t.ratio p.value
## 1 high S_L - S 0.442 0.242 126.838 1.826 0.070
## 2 high S_L - S_S 0.244 0.198 126.838 1.233 0.220
## 3 high S_S - S 0.199 0.214 126.838 0.930 0.354
## 4 high M_M - M 0.404 0.198 126.838 2.041 0.043 *
## 5 high L_S - L -0.953 0.228 126.838 -4.175 0.000 ***
## 6 high L_S - L_L -0.631 0.198 126.838 -3.190 0.002 **
## 7 high L_L - L -0.322 0.198 126.838 -1.631 0.105
## 8 low S_L - S 0.448 0.228 126.838 1.962 0.052
## 9 low S_L - S_S 0.507 0.198 126.838 2.565 0.011 *
## 10 low S_S - S -0.059 0.198 126.838 -0.300 0.765
## 11 low M_M - M 0.841 0.198 126.838 4.253 0.000 ***
## 12 low L_S - L -0.662 0.228 126.838 -2.897 0.004 **
## 13 low L_S - L_L -0.149 0.198 126.838 -0.755 0.452
## 14 low L_L - L -0.512 0.198 126.838 -2.591 0.011 *
# --- PLOT MODEL RESIDUALS --- #
# DHARMa residual diagnostics for the bioarea model
# Simulate residuals from the fitted model
simulationOutput <- simulateResiduals(fittedModel = model,
n = 1000)
# Test for overdispersion in the model to check if variance exceeds mean
testDispersion(simulationOutput)
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 1.0057, p-value = 0.894
## alternative hypothesis: two.sided
# Plot Q-Q and Residuals vs Fitted plots
plot(simulationOutput)
# Response variable analysed in this section: species richness
response_variable_selected = "species_richness"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Top panel: high-disturbance ecosystems over time
p1 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
# Bottom panel: low-disturbance ecosystems (legend suppressed, shared with p1)
p2 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- PREPARE DATA FOR ANALYSIS --- #
# Add baselines: per culture, the response value at the baseline time
# point, later used as a random-effect grouping term in the model.
baselines = ds_ecosystems %>%
filter(time_point == time_point_of_baselines) %>%
select(culture_ID,
all_of(response_variable_selected)) %>%
rename(baseline = all_of(response_variable_selected))
# Join by an explicit key instead of letting left_join() guess the shared
# columns (safer if ds_ecosystems ever gains a colliding column name)
data_for_analysis = ds_ecosystems %>%
left_join(baselines,
by = "culture_ID")
# Filter data and change level names
data_for_analysis = data_for_analysis %>%
filter(time_point %in% time_points_model,
#!is.na(water_addition_ml),
!is.na(!!sym(response_variable_selected)),
!is.infinite(!!sym(response_variable_selected))) %>%
mutate(ecosystem_type = case_when(ecosystem_type == "Small unconnected" ~ "S",
ecosystem_type == "Medium unconnected" ~ "M",
ecosystem_type == "Large unconnected" ~ "L",
ecosystem_type == "Small connected to small" ~ "S_S",
ecosystem_type == "Small connected to large" ~ "S_L",
ecosystem_type == "Medium connected to medium" ~ "M_M",
ecosystem_type == "Large connected to large" ~ "L_L",
ecosystem_type == "Large connected to small" ~ "L_S",
TRUE ~ ecosystem_type)) %>%
ungroup()
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI --- #
# Re-plot the filtered/recoded analysis data to confirm the preparation
# step did not distort the patterns seen in the raw data above
p1 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
p2 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- MODEL - CONSTRUCT --- #
# Define formula
# Same mixed-model structure as for the other response variables:
# three-way fixed-effect interaction plus random day slopes per culture
# and per baseline value.
formula = paste("get(response_variable_selected) ~",
"ecosystem_type * disturbance * day +",
"(day | culture_ID) +",
"(day | baseline)") %>%
print()
## [1] "get(response_variable_selected) ~ ecosystem_type * disturbance * day + (day | culture_ID) + (day | baseline)"
# Construct model
model = try.different.optimizers.ecos(data_for_analysis,
formula)
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- MODEL - SUMMARY --- #
print(summary(model), digits = 1)
## Linear mixed model fit by maximum likelihood . t-tests use Satterthwaite's
## method [lmerModLmerTest]
## Formula: formula
## Data: data
## Control: lmerControl(optimizer = opt$optimizer)
##
## AIC BIC logLik deviance df.resid
## 2273.8 2448.7 -1097.9 2195.8 615
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -3.6 -0.6 0.0 0.6 3.2
##
## Random effects:
## Groups Name Variance Std.Dev. Corr
## culture_ID (Intercept) 7e-05 0.008
## day 9e-06 0.003 -1.00
## baseline (Intercept) 1e-01 0.321
## day 7e-05 0.009 -1.00
## Residual 2e+00 1.289
## Number of obs: 654, groups: culture_ID, 109; baseline, 8
##
## Fixed effects:
## Estimate Std. Error df t value
## (Intercept) 7e+00 7e-01 3e+02 10.5
## ecosystem_typeL_L 3e-01 8e-01 5e+02 0.4
## ecosystem_typeL_S -8e-01 9e-01 5e+02 -0.9
## ecosystem_typeM 3e-01 9e-01 5e+02 0.4
## ecosystem_typeM_M 5e-01 8e-01 5e+02 0.6
## ecosystem_typeS -1e+00 1e+00 5e+02 -1.2
## ecosystem_typeS_L 1e+00 9e-01 5e+02 1.0
## ecosystem_typeS_S -9e-01 8e-01 5e+02 -1.1
## disturbancelow 5e-01 9e-01 5e+02 0.6
## day -2e-02 3e-02 5e+02 -0.5
## ecosystem_typeL_L:disturbancelow -8e-01 1e+00 5e+02 -0.7
## ecosystem_typeL_S:disturbancelow -5e-01 1e+00 5e+02 -0.4
## ecosystem_typeM:disturbancelow -6e-01 1e+00 5e+02 -0.5
## ecosystem_typeM_M:disturbancelow -5e-01 1e+00 5e+02 -0.5
## ecosystem_typeS:disturbancelow 2e+00 1e+00 5e+02 1.5
## ecosystem_typeS_L:disturbancelow -1e+00 1e+00 5e+02 -1.1
## ecosystem_typeS_S:disturbancelow 1e+00 1e+00 5e+02 0.8
## ecosystem_typeL_L:day -4e-02 4e-02 6e+02 -1.0
## ecosystem_typeL_S:day -4e-03 5e-02 6e+02 -0.1
## ecosystem_typeM:day -9e-02 5e-02 6e+02 -1.8
## ecosystem_typeM_M:day -8e-02 4e-02 6e+02 -1.9
## ecosystem_typeS:day -2e-01 5e-02 6e+02 -4.1
## ecosystem_typeS_L:day -2e-01 5e-02 6e+02 -5.1
## ecosystem_typeS_S:day -2e-01 4e-02 6e+02 -4.5
## disturbancelow:day -7e-03 5e-02 6e+02 -0.1
## ecosystem_typeL_L:disturbancelow:day 2e-02 6e-02 6e+02 0.3
## ecosystem_typeL_S:disturbancelow:day 3e-02 7e-02 6e+02 0.5
## ecosystem_typeM:disturbancelow:day 3e-02 7e-02 6e+02 0.4
## ecosystem_typeM_M:disturbancelow:day 4e-02 6e-02 6e+02 0.6
## ecosystem_typeS:disturbancelow:day -2e-02 7e-02 6e+02 -0.3
## ecosystem_typeS_L:disturbancelow:day 2e-01 7e-02 6e+02 2.5
## ecosystem_typeS_S:disturbancelow:day 7e-04 6e-02 6e+02 0.0
## Pr(>|t|)
## (Intercept) <2e-16 ***
## ecosystem_typeL_L 0.69
## ecosystem_typeL_S 0.37
## ecosystem_typeM 0.72
## ecosystem_typeM_M 0.53
## ecosystem_typeS 0.23
## ecosystem_typeS_L 0.31
## ecosystem_typeS_S 0.27
## disturbancelow 0.57
## day 0.59
## ecosystem_typeL_L:disturbancelow 0.49
## ecosystem_typeL_S:disturbancelow 0.69
## ecosystem_typeM:disturbancelow 0.64
## ecosystem_typeM_M:disturbancelow 0.65
## ecosystem_typeS:disturbancelow 0.15
## ecosystem_typeS_L:disturbancelow 0.27
## ecosystem_typeS_S:disturbancelow 0.40
## ecosystem_typeL_L:day 0.31
## ecosystem_typeL_S:day 0.93
## ecosystem_typeM:day 0.07 .
## ecosystem_typeM_M:day 0.06 .
## ecosystem_typeS:day 4e-05 ***
## ecosystem_typeS_L:day 5e-07 ***
## ecosystem_typeS_S:day 7e-06 ***
## disturbancelow:day 0.89
## ecosystem_typeL_L:disturbancelow:day 0.73
## ecosystem_typeL_S:disturbancelow:day 0.65
## ecosystem_typeM:disturbancelow:day 0.72
## ecosystem_typeM_M:disturbancelow:day 0.52
## ecosystem_typeS:disturbancelow:day 0.80
## ecosystem_typeS_L:disturbancelow:day 0.01 *
## ecosystem_typeS_S:disturbancelow:day 0.99
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# --- MODEL - ANOVA --- #
# Type III Wald chi-square tests on the fixed effects of the fitted
# mixed model. car::Anova is used (not stats::anova) because it
# provides Type III tests for lmer-type models.
car::Anova(model, type = "III")
## Analysis of Deviance Table (Type III Wald chisquare tests)
##
## Response: get(response_variable_selected)
## Chisq Df Pr(>Chisq)
## (Intercept) 110.2649 1 < 2.2e-16 ***
## ecosystem_type 11.8520 7 0.1055
## disturbance 0.3227 1 0.5700
## day 0.2915 1 0.5893
## ecosystem_type:disturbance 11.0533 7 0.1363
## ecosystem_type:day 63.3876 7 3.169e-11 ***
## disturbance:day 0.0199 1 0.8879
## ecosystem_type:disturbance:day 10.9139 7 0.1424
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# --- ESTIMATE MARGINAL MEANS --- #
# Estimated marginal means for all 16 ecosystem_type x disturbance cells
# (evaluated at the mean day), with Sidak-adjusted confidence intervals
# and Satterthwaite degrees of freedom.
# NOTE(review): bias.adj = TRUE normally requires an explicit sigma value
# to take effect for lmer models — confirm against the emmeans docs.
# NOTE(review): results are labelled on the "get" scale because the model
# formula wraps the response in get(); values are on the response scale.
emmeans_output = emmeans(model,
specs = ~ ecosystem_type * disturbance * day,
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
emmeans_output
## ecosystem_type disturbance day emmean SE df lower.CL upper.CL
## L high 18 6.84 0.251 115.7 6.081 7.59
## L_L high 18 6.40 0.181 77.8 5.845 6.95
## L_S high 18 5.92 0.250 116.9 5.168 6.67
## M high 18 5.58 0.248 124.6 4.835 6.32
## M_M high 18 5.94 0.183 69.6 5.381 6.50
## S high 18 1.78 0.277 128.8 0.948 2.61
## S_L high 18 3.34 0.248 125.2 2.596 4.09
## S_S high 18 2.46 0.182 75.3 1.902 3.01
## L low 18 7.25 0.252 114.4 6.492 8.01
## L_L low 18 6.38 0.182 74.8 5.826 6.94
## L_S low 18 6.38 0.250 115.3 5.621 7.13
## M low 18 5.81 0.255 108.3 5.042 6.58
## M_M low 18 6.51 0.183 69.1 5.947 7.06
## S low 18 3.87 0.250 117.8 3.119 4.62
## S_L low 18 5.42 0.249 120.2 4.674 6.17
## S_S low 18 3.86 0.187 64.8 3.292 4.44
##
## Degrees-of-freedom method: satterthwaite
## Results are given on the get (not the response) scale.
## Confidence level used: 0.95
## Conf-level adjustment: sidak method for 16 estimates
# --- CODE EACH LEVEL OF DISTURBANCE AND ECOSYSTEM TYPE TO THEN PRODUCE CONTRASTS --- #
# One-hot indicator vectors over the 16 rows of emmeans_output
# (8 ecosystem types x 2 disturbance levels; the "high" block comes
# first). These are combined below into custom contrasts.
one.hot.16 = function(position) replace(numeric(16), position, 1)
high_L   = one.hot.16(1)
high_L_L = one.hot.16(2)
high_L_S = one.hot.16(3)
high_M   = one.hot.16(4)
high_M_M = one.hot.16(5)
high_S   = one.hot.16(6)
high_S_L = one.hot.16(7)
high_S_S = one.hot.16(8)
low_L    = one.hot.16(9)
low_L_L  = one.hot.16(10)
low_L_S  = one.hot.16(11)
low_M    = one.hot.16(12)
low_M_M  = one.hot.16(13)
low_S    = one.hot.16(14)
low_S_L  = one.hot.16(15)
low_S_S  = one.hot.16(16)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Contrast the unconnected ecosystem sizes (L, M) against small (S)
# within each disturbance level; round the output columns and attach
# significance stars. p between 0.05 and 0.1 deliberately gets no marker.
contrasts = contrast(emmeans_output,
                     method = list("high L - S" = high_L - high_S,
                                   "high M - S" = high_M - high_S,
                                   "low L - S" = low_L - low_S,
                                   "low M - S" = low_M - low_S)) %>%
  as.data.frame() %>%
  mutate(estimate = round(estimate, digits = n_of_digits),
         SE = round(SE, digits = n_of_digits),
         df = round(df, digits = n_of_digits),
         t.ratio = round(t.ratio, digits = n_of_digits),
         p.value = round(p.value, digits = n_of_digits),
         e = case_when(is.na(p.value) ~ NA_character_,
                       p.value < 0.001 ~ "***",
                       p.value < 0.01 ~ "**",
                       p.value < 0.05 ~ "*",
                       TRUE ~ "")) %>%
  rename(" " = e)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Contrast each connected ecosystem against its unconnected counterpart,
# and the two members of asymmetric pairs against each other, within each
# disturbance level; round the output and attach significance stars
# (no "." marker is produced for 0.05 < p <= 0.1).
contrasts = contrast(emmeans_output,
                     method = list("high S_L - S" = high_S_L - high_S,
                                   "high S_L - S_S" = high_S_L - high_S_S,
                                   "high S_S - S" = high_S_S - high_S,
                                   "high M_M - M" = high_M_M - high_M,
                                   "high L_S - L" = high_L_S - high_L,
                                   "high L_S - L_L" = high_L_S - high_L_L,
                                   "high L_L - L" = high_L_L - high_L,
                                   "low S_L - S" = low_S_L - low_S,
                                   "low S_L - S_S" = low_S_L - low_S_S,
                                   "low S_S - S" = low_S_S - low_S,
                                   "low M_M - M" = low_M_M - low_M,
                                   "low L_S - L" = low_L_S - low_L,
                                   "low L_S - L_L" = low_L_S - low_L_L,
                                   "low L_L - L" = low_L_L - low_L)) %>%
  as.data.frame() %>%
  mutate(estimate = round(estimate, digits = n_of_digits),
         SE = round(SE, digits = n_of_digits),
         df = round(df, digits = n_of_digits),
         t.ratio = round(t.ratio, digits = n_of_digits),
         p.value = round(p.value, digits = n_of_digits),
         e = case_when(is.na(p.value) ~ NA_character_,
                       p.value < 0.001 ~ "***",
                       p.value < 0.01 ~ "**",
                       p.value < 0.05 ~ "*",
                       TRUE ~ "")) %>%
  rename(" " = e)
# --- SHOW ECOSYSTEM TYPE CONSTRASTS --- #
contrasts
## contrast estimate SE df t.ratio p.value
## 1 high S_L - S 1.560 0.363 155.966 4.300 0.000 ***
## 2 high S_L - S_S 0.886 0.293 154.170 3.021 0.003 **
## 3 high S_S - S 0.674 0.316 153.687 2.133 0.035 *
## 4 high M_M - M 0.359 0.293 154.093 1.225 0.222
## 5 high L_S - L -0.916 0.336 151.815 -2.723 0.007 **
## 6 high L_S - L_L -0.474 0.294 154.833 -1.614 0.109
## 7 high L_L - L -0.442 0.295 155.519 -1.498 0.136
## 8 low S_L - S 1.550 0.338 153.410 4.587 0.000 ***
## 9 low S_L - S_S 1.557 0.291 151.585 5.348 0.000 ***
## 10 low S_S - S -0.007 0.293 153.952 -0.024 0.981
## 11 low M_M - M 0.694 0.295 155.458 2.350 0.020 *
## 12 low L_S - L -0.874 0.343 156.316 -2.551 0.012 *
## 13 low L_S - L_L -0.006 0.294 155.074 -0.021 0.983
## 14 low L_L - L -0.868 0.294 155.060 -2.949 0.004 **
# --- PLOT MODEL RESIDUALS --- #
# DHARMa scaled-residual diagnostics: 1000 parametric simulations from
# the fitted model are compared against the observed residuals.
# Simulate residuals from the fitted model
simulationOutput <- simulateResiduals(fittedModel = model,
n = 1000)
# Test for overdispersion in the model to check if variance exceeds mean
# (dispersion ~ 1 with a large p-value indicates no evidence of over- or
# underdispersion).
testDispersion(simulationOutput)
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 1.004, p-value = 0.96
## alternative hypothesis: two.sided
# Plot Q-Q and Residuals vs Fitted plots
plot(simulationOutput)
response_variable_selected = "evenness_pielou"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Top panel: high-disturbance cultures; the x axis is suppressed because
# it is shared with the bottom panel.
p1 = plot.all.patches.points(data = ds_ecosystems %>%
                               filter(disturbance == "high"),
                             response_variable_selected) +
  annotate("text", x = Inf, y = Inf,
           label = "High disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold") +
  theme(axis.title.x = element_blank(),
        axis.text.x = element_blank())
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
# Bottom panel: low-disturbance cultures; legend hidden (shown in p1).
p2 = plot.all.patches.points(data = ds_ecosystems %>%
                               filter(disturbance == "low"),
                             response_variable_selected) +
  theme(legend.position = "none") +
  annotate("text", x = Inf, y = Inf,
           label = "Low disturbance",
           hjust = 1.1, vjust = 1.1,
           size = 5, fontface = "bold")
# Stack the two panels vertically, the top one slightly taller.
ggarrange(p1, p2,
          ncol = 1, nrow = 2,
          heights = c(1, 0.7))
# --- PREPARE DATA FOR ANALYSIS --- #
# Baseline value of the response for each culture, taken at the baseline
# time point; joined back in below as a random-effect grouping variable.
baselines = ds_ecosystems %>%
  filter(time_point == time_point_of_baselines) %>%
  select(culture_ID, all_of(response_variable_selected)) %>%
  rename(baseline = all_of(response_variable_selected))
# Attach the baselines, keep only the modelled time points with finite,
# non-missing responses, and shorten the ecosystem-type labels.
data_for_analysis = ds_ecosystems %>%
  left_join(baselines) %>%
  filter(time_point %in% time_points_model,
         #!is.na(water_addition_ml),
         !is.na(!!sym(response_variable_selected)),
         !is.infinite(!!sym(response_variable_selected))) %>%
  mutate(ecosystem_type = case_when(ecosystem_type == "Small unconnected" ~ "S",
                                    ecosystem_type == "Medium unconnected" ~ "M",
                                    ecosystem_type == "Large unconnected" ~ "L",
                                    ecosystem_type == "Small connected to small" ~ "S_S",
                                    ecosystem_type == "Small connected to large" ~ "S_L",
                                    ecosystem_type == "Medium connected to medium" ~ "M_M",
                                    ecosystem_type == "Large connected to large" ~ "L_L",
                                    ecosystem_type == "Large connected to small" ~ "L_S",
                                    TRUE ~ ecosystem_type)) %>%
  ungroup()
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI --- #
# Same two-panel layout as above, but drawn from the filtered modelling
# dataset (data_for_analysis) rather than the raw ds_ecosystems.
# NOTE(review): the qt() NaN warning suggests some mean±CI groups have
# N = 1 (CI undefined) — confirm in plot.all.patches.points.
p1 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
p2 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- MODEL - CONSTRUCT --- #
# Model formula: fixed effects are the full ecosystem_type x disturbance
# x day interaction; random slopes over day within culture_ID and within
# the baseline value of the response.
formula = paste("get(response_variable_selected) ~",
                "ecosystem_type * disturbance * day +",
                "(day | culture_ID) +",
                "(day | baseline)")
print(formula)
## [1] "get(response_variable_selected) ~ ecosystem_type * disturbance * day + (day | culture_ID) + (day | baseline)"
# Fit the mixed model, cycling through optimizers until one converges.
model = try.different.optimizers.ecos(data_for_analysis, formula)
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- MODEL - SUMMARY --- #
# Heavy rounding: only sign and order of magnitude matter at this stage.
print(summary(model), digits = 1)
## Linear mixed model fit by maximum likelihood . t-tests use Satterthwaite's
## method [lmerModLmerTest]
## Formula: formula
## Data: data
## Control: lmerControl(optimizer = opt$optimizer)
##
## AIC BIC logLik deviance df.resid
## -692.0 -519.8 385.0 -770.0 573
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -5.7 -0.5 0.1 0.6 3.9
##
## Random effects:
## Groups Name Variance Std.Dev. Corr
## culture_ID (Intercept) 6e-16 2e-08
## day 1e-18 1e-09 -1.00
## baseline (Intercept) 4e-12 2e-06
## day 2e-14 1e-07 -1.00
## Residual 2e-02 1e-01
## Number of obs: 612, groups: culture_ID, 109; baseline, 109
##
## Fixed effects:
## Estimate Std. Error df t value
## (Intercept) 8e-01 7e-02 6e+02 11.4
## ecosystem_typeL_L -6e-02 8e-02 6e+02 -0.8
## ecosystem_typeL_S -8e-02 9e-02 6e+02 -0.9
## ecosystem_typeM -2e-01 9e-02 6e+02 -2.2
## ecosystem_typeM_M -8e-02 8e-02 6e+02 -0.9
## ecosystem_typeS 5e-01 1e-01 6e+02 4.3
## ecosystem_typeS_L 6e-02 1e-01 6e+02 0.6
## ecosystem_typeS_S 5e-01 8e-02 6e+02 5.5
## disturbancelow 2e-01 9e-02 6e+02 1.6
## day 5e-04 3e-03 6e+02 0.2
## ecosystem_typeL_L:disturbancelow -9e-02 1e-01 6e+02 -0.8
## ecosystem_typeL_S:disturbancelow -1e-01 1e-01 6e+02 -0.9
## ecosystem_typeM:disturbancelow -8e-02 1e-01 6e+02 -0.6
## ecosystem_typeM_M:disturbancelow -2e-01 1e-01 6e+02 -1.5
## ecosystem_typeS:disturbancelow -5e-01 1e-01 6e+02 -3.5
## ecosystem_typeS_L:disturbancelow -3e-01 1e-01 6e+02 -2.2
## ecosystem_typeS_S:disturbancelow -6e-01 1e-01 6e+02 -5.3
## ecosystem_typeL_L:day 5e-03 4e-03 6e+02 1.1
## ecosystem_typeL_S:day 5e-03 5e-03 6e+02 1.0
## ecosystem_typeM:day 1e-02 5e-03 6e+02 2.3
## ecosystem_typeM_M:day 5e-03 4e-03 6e+02 1.1
## ecosystem_typeS:day -5e-02 6e-03 6e+02 -8.0
## ecosystem_typeS_L:day 4e-03 5e-03 6e+02 0.7
## ecosystem_typeS_S:day -3e-02 4e-03 6e+02 -6.8
## disturbancelow:day -8e-03 5e-03 6e+02 -1.6
## ecosystem_typeL_L:disturbancelow:day 4e-03 6e-03 6e+02 0.7
## ecosystem_typeL_S:disturbancelow:day 6e-03 7e-03 6e+02 0.9
## ecosystem_typeM:disturbancelow:day 6e-03 7e-03 6e+02 0.9
## ecosystem_typeM_M:disturbancelow:day 6e-03 6e-03 6e+02 1.0
## ecosystem_typeS:disturbancelow:day 5e-02 8e-03 6e+02 6.3
## ecosystem_typeS_L:disturbancelow:day 1e-02 7e-03 6e+02 1.6
## ecosystem_typeS_S:disturbancelow:day 4e-02 6e-03 6e+02 6.7
## Pr(>|t|)
## (Intercept) <2e-16 ***
## ecosystem_typeL_L 0.44
## ecosystem_typeL_S 0.37
## ecosystem_typeM 0.03 *
## ecosystem_typeM_M 0.35
## ecosystem_typeS 2e-05 ***
## ecosystem_typeS_L 0.52
## ecosystem_typeS_S 7e-08 ***
## disturbancelow 0.10
## day 0.88
## ecosystem_typeL_L:disturbancelow 0.42
## ecosystem_typeL_S:disturbancelow 0.38
## ecosystem_typeM:disturbancelow 0.53
## ecosystem_typeM_M:disturbancelow 0.13
## ecosystem_typeS:disturbancelow 6e-04 ***
## ecosystem_typeS_L:disturbancelow 0.03 *
## ecosystem_typeS_S:disturbancelow 1e-07 ***
## ecosystem_typeL_L:day 0.26
## ecosystem_typeL_S:day 0.30
## ecosystem_typeM:day 0.02 *
## ecosystem_typeM_M:day 0.25
## ecosystem_typeS:day 7e-15 ***
## ecosystem_typeS_L:day 0.47
## ecosystem_typeS_S:day 3e-11 ***
## disturbancelow:day 0.11
## ecosystem_typeL_L:disturbancelow:day 0.50
## ecosystem_typeL_S:disturbancelow:day 0.37
## ecosystem_typeM:disturbancelow:day 0.37
## ecosystem_typeM_M:disturbancelow:day 0.30
## ecosystem_typeS:disturbancelow:day 6e-10 ***
## ecosystem_typeS_L:disturbancelow:day 0.10
## ecosystem_typeS_S:disturbancelow:day 6e-11 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# --- MODEL - ANOVA --- #
# Type III Wald chi-square tests on the fixed effects of the fitted
# mixed model (car::Anova supports Type III for lmer-type models).
car::Anova(model, type = "III")
## Analysis of Deviance Table (Type III Wald chisquare tests)
##
## Response: get(response_variable_selected)
## Chisq Df Pr(>Chisq)
## (Intercept) 130.1357 1 < 2.2e-16 ***
## ecosystem_type 121.9831 7 < 2.2e-16 ***
## disturbance 2.6800 1 0.1016
## day 0.0240 1 0.8769
## ecosystem_type:disturbance 53.2690 7 3.28e-09 ***
## ecosystem_type:day 225.4252 7 < 2.2e-16 ***
## disturbance:day 2.5267 1 0.1119
## ecosystem_type:disturbance:day 111.4767 7 < 2.2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# --- ESTIMATE MARGINAL MEANS --- #
# Estimated marginal means for all 16 ecosystem_type x disturbance cells
# (evaluated at the mean day), with Sidak-adjusted confidence intervals
# and Satterthwaite degrees of freedom.
# NOTE(review): bias.adj = TRUE normally requires an explicit sigma value
# to take effect for lmer models — confirm against the emmeans docs.
emmeans_output = emmeans(model,
specs = ~ ecosystem_type * disturbance * day,
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
emmeans_output
## ecosystem_type disturbance day emmean SE df lower.CL upper.CL
## L high 17.7 0.767 0.0236 612 0.697 0.836
## L_L high 17.7 0.789 0.0167 612 0.739 0.838
## L_S high 17.7 0.772 0.0236 612 0.702 0.842
## M high 17.7 0.756 0.0236 612 0.687 0.826
## M_M high 17.7 0.777 0.0167 612 0.728 0.826
## S high 17.7 0.415 0.0349 612 0.312 0.518
## S_L high 17.7 0.899 0.0276 612 0.817 0.981
## S_S high 17.7 0.687 0.0199 612 0.629 0.746
## L low 17.7 0.783 0.0236 612 0.713 0.853
## L_L low 17.7 0.783 0.0167 612 0.734 0.832
## L_S low 17.7 0.782 0.0236 612 0.712 0.852
## M low 17.7 0.799 0.0236 612 0.730 0.869
## M_M low 17.7 0.729 0.0167 612 0.680 0.779
## S low 17.7 0.783 0.0249 612 0.709 0.856
## S_L low 17.7 0.823 0.0236 612 0.753 0.893
## S_S low 17.7 0.814 0.0176 612 0.762 0.866
##
## Degrees-of-freedom method: satterthwaite
## Results are given on the get (not the response) scale.
## Confidence level used: 0.95
## Conf-level adjustment: sidak method for 16 estimates
# --- CODE EACH LEVEL OF DISTURBANCE AND ECOSYSTEM TYPE TO THEN PRODUCE CONTRASTS --- #
# One-hot indicator vectors over the 16 rows of emmeans_output
# (8 ecosystem types x 2 disturbance levels; the "high" block comes
# first). These are combined below into custom contrasts.
one.hot.16 = function(position) replace(numeric(16), position, 1)
high_L   = one.hot.16(1)
high_L_L = one.hot.16(2)
high_L_S = one.hot.16(3)
high_M   = one.hot.16(4)
high_M_M = one.hot.16(5)
high_S   = one.hot.16(6)
high_S_L = one.hot.16(7)
high_S_S = one.hot.16(8)
low_L    = one.hot.16(9)
low_L_L  = one.hot.16(10)
low_L_S  = one.hot.16(11)
low_M    = one.hot.16(12)
low_M_M  = one.hot.16(13)
low_S    = one.hot.16(14)
low_S_L  = one.hot.16(15)
low_S_S  = one.hot.16(16)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Contrast the unconnected ecosystem sizes (L, M) against small (S)
# within each disturbance level; round the output columns and attach
# significance stars. p between 0.05 and 0.1 deliberately gets no marker.
contrasts = contrast(emmeans_output,
                     method = list("high L - S" = high_L - high_S,
                                   "high M - S" = high_M - high_S,
                                   "low L - S" = low_L - low_S,
                                   "low M - S" = low_M - low_S)) %>%
  as.data.frame() %>%
  mutate(estimate = round(estimate, digits = n_of_digits),
         SE = round(SE, digits = n_of_digits),
         df = round(df, digits = n_of_digits),
         t.ratio = round(t.ratio, digits = n_of_digits),
         p.value = round(p.value, digits = n_of_digits),
         e = case_when(is.na(p.value) ~ NA_character_,
                       p.value < 0.001 ~ "***",
                       p.value < 0.01 ~ "**",
                       p.value < 0.05 ~ "*",
                       TRUE ~ "")) %>%
  rename(" " = e)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Contrast each connected ecosystem against its unconnected counterpart,
# and the two members of asymmetric pairs against each other, within each
# disturbance level; round the output and attach significance stars
# (no "." marker is produced for 0.05 < p <= 0.1).
contrasts = contrast(emmeans_output,
                     method = list("high S_L - S" = high_S_L - high_S,
                                   "high S_L - S_S" = high_S_L - high_S_S,
                                   "high S_S - S" = high_S_S - high_S,
                                   "high M_M - M" = high_M_M - high_M,
                                   "high L_S - L" = high_L_S - high_L,
                                   "high L_S - L_L" = high_L_S - high_L_L,
                                   "high L_L - L" = high_L_L - high_L,
                                   "low S_L - S" = low_S_L - low_S,
                                   "low S_L - S_S" = low_S_L - low_S_S,
                                   "low S_S - S" = low_S_S - low_S,
                                   "low M_M - M" = low_M_M - low_M,
                                   "low L_S - L" = low_L_S - low_L,
                                   "low L_S - L_L" = low_L_S - low_L_L,
                                   "low L_L - L" = low_L_L - low_L)) %>%
  as.data.frame() %>%
  mutate(estimate = round(estimate, digits = n_of_digits),
         SE = round(SE, digits = n_of_digits),
         df = round(df, digits = n_of_digits),
         t.ratio = round(t.ratio, digits = n_of_digits),
         p.value = round(p.value, digits = n_of_digits),
         e = case_when(is.na(p.value) ~ NA_character_,
                       p.value < 0.001 ~ "***",
                       p.value < 0.01 ~ "**",
                       p.value < 0.05 ~ "*",
                       TRUE ~ "")) %>%
  rename(" " = e)
# --- SHOW ECOSYSTEM TYPE CONSTRASTS --- #
contrasts
## contrast estimate SE df t.ratio p.value
## 1 high S_L - S 0.484 0.044 612 10.869 0.000 ***
## 2 high S_L - S_S 0.211 0.034 612 6.207 0.000 ***
## 3 high S_S - S 0.272 0.040 612 6.786 0.000 ***
## 4 high M_M - M 0.021 0.029 612 0.715 0.475
## 5 high L_S - L 0.006 0.033 612 0.166 0.868
## 6 high L_S - L_L -0.017 0.029 612 -0.575 0.566
## 7 high L_L - L 0.022 0.029 612 0.766 0.444
## 8 low S_L - S 0.040 0.034 612 1.173 0.241
## 9 low S_L - S_S 0.009 0.029 612 0.299 0.765
## 10 low S_S - S 0.031 0.030 612 1.032 0.303
## 11 low M_M - M -0.070 0.029 612 -2.423 0.016 *
## 12 low L_S - L -0.001 0.033 612 -0.027 0.978
## 13 low L_S - L_L -0.001 0.029 612 -0.023 0.982
## 14 low L_L - L 0.000 0.029 612 -0.009 0.993
# --- PLOT MODEL RESIDUALS --- #
# DHARMa scaled-residual diagnostics: 1000 parametric simulations from
# the fitted model are compared against the observed residuals.
# Simulate residuals from the fitted model
simulationOutput <- simulateResiduals(fittedModel = model,
n = 1000)
# Test for overdispersion in the model to check if variance exceeds mean
# (dispersion ~ 1 with a large p-value indicates no evidence of over- or
# underdispersion).
testDispersion(simulationOutput)
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 1.0036, p-value = 0.944
## alternative hypothesis: two.sided
# Plot Q-Q and Residuals vs Fitted plots
plot(simulationOutput)
response_variable_selected = "median_body_area_µm2"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Two-panel figure of median body area over time, split by disturbance
# level; the top panel's x axis is suppressed as it is shared below.
# NOTE(review): the qt() NaN warning suggests some mean±CI groups have
# N = 1 (CI undefined) — confirm in plot.all.patches.points.
p1 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
p2 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- PREPARE DATA FOR ANALYSIS --- #
# Baseline value of the response for each culture, taken at the baseline
# time point; joined back in below as a random-effect grouping variable.
baselines = ds_ecosystems %>%
  filter(time_point == time_point_of_baselines) %>%
  select(culture_ID, all_of(response_variable_selected)) %>%
  rename(baseline = all_of(response_variable_selected))
# Attach the baselines, keep only the modelled time points with finite,
# non-missing responses, and shorten the ecosystem-type labels.
data_for_analysis = ds_ecosystems %>%
  left_join(baselines) %>%
  filter(time_point %in% time_points_model,
         #!is.na(water_addition_ml),
         !is.na(!!sym(response_variable_selected)),
         !is.infinite(!!sym(response_variable_selected))) %>%
  mutate(ecosystem_type = case_when(ecosystem_type == "Small unconnected" ~ "S",
                                    ecosystem_type == "Medium unconnected" ~ "M",
                                    ecosystem_type == "Large unconnected" ~ "L",
                                    ecosystem_type == "Small connected to small" ~ "S_S",
                                    ecosystem_type == "Small connected to large" ~ "S_L",
                                    ecosystem_type == "Medium connected to medium" ~ "M_M",
                                    ecosystem_type == "Large connected to large" ~ "L_L",
                                    ecosystem_type == "Large connected to small" ~ "L_S",
                                    TRUE ~ ecosystem_type)) %>%
  ungroup()
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI --- #
# Same two-panel layout as above, but drawn from the filtered modelling
# dataset (data_for_analysis) rather than the raw ds_ecosystems.
# NOTE(review): the qt() NaN warning suggests some mean±CI groups have
# N = 1 (CI undefined) — confirm in plot.all.patches.points.
p1 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
p2 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- MODEL - CONSTRUCT --- #
# Model formula: fixed effects are the full ecosystem_type x disturbance
# x day interaction; random slopes over day within culture_ID and within
# the baseline value of the response.
formula = paste("get(response_variable_selected) ~",
                "ecosystem_type * disturbance * day +",
                "(day | culture_ID) +",
                "(day | baseline)")
print(formula)
## [1] "get(response_variable_selected) ~ ecosystem_type * disturbance * day + (day | culture_ID) + (day | baseline)"
# Fit the mixed model, cycling through optimizers until one converges.
model = try.different.optimizers.ecos(data_for_analysis, formula)
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- MODEL - SUMMARY --- #
# Heavy rounding: only sign and order of magnitude matter at this stage.
print(summary(model), digits = 1)
## Linear mixed model fit by maximum likelihood . t-tests use Satterthwaite's
## method [lmerModLmerTest]
## Formula: formula
## Data: data
## Control: lmerControl(optimizer = opt$optimizer)
##
## AIC BIC logLik deviance df.resid
## 10359.9 10534.0 -5141.0 10281.9 602
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -4.3 -0.5 0.0 0.5 3.8
##
## Random effects:
## Groups Name Variance Std.Dev. Corr
## culture_ID (Intercept) 7e+03 83
## day 8e+01 9 -1.00
## baseline (Intercept) 4e+04 204
## day 5e+02 21 -1.00
## Residual 5e+05 702
## Number of obs: 641, groups: culture_ID, 109; baseline, 109
##
## Fixed effects:
## Estimate Std. Error df t value
## (Intercept) 3e+03 4e+02 3e+02 8.9
## ecosystem_typeL_L -4e+02 5e+02 3e+02 -1.0
## ecosystem_typeL_S 3e+02 5e+02 3e+02 0.5
## ecosystem_typeM -2e+01 5e+02 3e+02 0.0
## ecosystem_typeM_M 7e+01 5e+02 3e+02 0.1
## ecosystem_typeS -1e+03 6e+02 3e+02 -2.1
## ecosystem_typeS_L -1e+03 5e+02 3e+02 -1.8
## ecosystem_typeS_S -1e+01 5e+02 3e+02 0.0
## disturbancelow -1e+02 5e+02 3e+02 -0.2
## day -7e+00 2e+01 2e+02 -0.3
## ecosystem_typeL_L:disturbancelow -4e+02 6e+02 3e+02 -0.6
## ecosystem_typeL_S:disturbancelow -1e+03 7e+02 3e+02 -1.5
## ecosystem_typeM:disturbancelow 6e+01 7e+02 3e+02 0.1
## ecosystem_typeM_M:disturbancelow 6e+01 6e+02 3e+02 0.1
## ecosystem_typeS:disturbancelow 1e+03 8e+02 3e+02 1.5
## ecosystem_typeS_L:disturbancelow 9e+02 7e+02 3e+02 1.2
## ecosystem_typeS_S:disturbancelow -2e+02 7e+02 3e+02 -0.3
## ecosystem_typeL_L:day 6e+01 3e+01 2e+02 2.1
## ecosystem_typeL_S:day -3e-01 3e+01 2e+02 0.0
## ecosystem_typeM:day 3e+01 3e+01 2e+02 1.0
## ecosystem_typeM_M:day 2e+01 3e+01 2e+02 0.8
## ecosystem_typeS:day 7e+01 4e+01 2e+02 1.9
## ecosystem_typeS_L:day 9e+01 3e+01 2e+02 3.0
## ecosystem_typeS_S:day -4e+00 3e+01 2e+02 -0.1
## disturbancelow:day 2e+01 3e+01 2e+02 0.8
## ecosystem_typeL_L:disturbancelow:day -1e+00 4e+01 2e+02 0.0
## ecosystem_typeL_S:disturbancelow:day 5e+01 4e+01 2e+02 1.1
## ecosystem_typeM:disturbancelow:day -2e+01 4e+01 2e+02 -0.6
## ecosystem_typeM_M:disturbancelow:day -3e+01 4e+01 2e+02 -0.7
## ecosystem_typeS:disturbancelow:day -6e+01 5e+01 2e+02 -1.3
## ecosystem_typeS_L:disturbancelow:day -7e+01 4e+01 2e+02 -1.7
## ecosystem_typeS_S:disturbancelow:day 2e+01 4e+01 2e+02 0.6
## Pr(>|t|)
## (Intercept) <2e-16 ***
## ecosystem_typeL_L 0.333
## ecosystem_typeL_S 0.584
## ecosystem_typeM 0.968
## ecosystem_typeM_M 0.882
## ecosystem_typeS 0.036 *
## ecosystem_typeS_L 0.067 .
## ecosystem_typeS_S 0.979
## disturbancelow 0.850
## day 0.734
## ecosystem_typeL_L:disturbancelow 0.535
## ecosystem_typeL_S:disturbancelow 0.146
## ecosystem_typeM:disturbancelow 0.933
## ecosystem_typeM_M:disturbancelow 0.932
## ecosystem_typeS:disturbancelow 0.146
## ecosystem_typeS_L:disturbancelow 0.218
## ecosystem_typeS_S:disturbancelow 0.802
## ecosystem_typeL_L:day 0.037 *
## ecosystem_typeL_S:day 0.993
## ecosystem_typeM:day 0.303
## ecosystem_typeM_M:day 0.423
## ecosystem_typeS:day 0.059 .
## ecosystem_typeS_L:day 0.003 **
## ecosystem_typeS_S:day 0.895
## disturbancelow:day 0.417
## ecosystem_typeL_L:disturbancelow:day 0.978
## ecosystem_typeL_S:disturbancelow:day 0.257
## ecosystem_typeM:disturbancelow:day 0.562
## ecosystem_typeM_M:disturbancelow:day 0.485
## ecosystem_typeS:disturbancelow:day 0.192
## ecosystem_typeS_L:disturbancelow:day 0.099 .
## ecosystem_typeS_S:disturbancelow:day 0.551
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# --- MODEL - ANOVA --- #
# Type III Wald chi-square tests on the fixed effects of the fitted
# mixed model (car::Anova supports Type III for lmer-type models).
car::Anova(model, type = "III")
## Analysis of Deviance Table (Type III Wald chisquare tests)
##
## Response: get(response_variable_selected)
## Chisq Df Pr(>Chisq)
## (Intercept) 79.3960 1 < 2.2e-16 ***
## ecosystem_type 13.9140 7 0.052733 .
## disturbance 0.0358 1 0.849879
## day 0.1156 1 0.733849
## ecosystem_type:disturbance 12.4010 7 0.088118 .
## ecosystem_type:day 20.7691 7 0.004127 **
## disturbance:day 0.6627 1 0.415605
## ecosystem_type:disturbance:day 13.0090 7 0.071889 .
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# --- ESTIMATE MARGINAL MEANS --- #
emmeans_output = emmeans(model,
specs = ~ ecosystem_type * disturbance * day,
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
emmeans_output
## ecosystem_type disturbance day emmean SE df lower.CL upper.CL
## L high 17.8 3207 155 126 2741 3672
## L_L high 17.8 3747 109 126 3418 4076
## L_S high 17.8 3492 155 126 3027 3958
## M high 17.8 3745 155 126 3280 4211
## M_M high 17.8 3651 109 126 3322 3980
## S high 17.8 3165 198 202 2574 3756
## S_L high 17.8 3853 155 126 3387 4318
## S_S high 17.8 3130 115 146 2784 3476
## L low 17.8 3547 155 126 3082 4012
## L_L low 17.8 3667 109 126 3338 3996
## L_S low 17.8 3614 155 126 3148 4079
## M low 17.8 3704 155 126 3238 4169
## M_M low 17.8 3583 109 126 3254 3912
## S low 17.8 3569 157 131 3098 4041
## S_L low 17.8 3849 155 126 3383 4314
## S_S low 17.8 3710 109 126 3380 4039
##
## Degrees-of-freedom method: satterthwaite
## Results are given on the get (not the response) scale.
## Confidence level used: 0.95
## Conf-level adjustment: sidak method for 16 estimates
# --- CODE EACH LEVEL OF DISTURBANCE AND ECOSYSTEM TYPE TO THEN PRODUCE CONTRASTS --- #
high_L = c(1, rep(0,15))
high_L_L = c(rep(0,1), 1, rep(0,14))
high_L_S = c(rep(0,2), 1, rep(0,13))
high_M = c(rep(0,3), 1, rep(0,12))
high_M_M = c(rep(0,4), 1, rep(0,11))
high_S = c(rep(0,5), 1, rep(0,10))
high_S_L = c(rep(0,6), 1, rep(0,9))
high_S_S = c(rep(0,7), 1, rep(0,8))
low_L = c(rep(0,8), 1, rep(0,7))
low_L_L = c(rep(0,9), 1, rep(0,6))
low_L_S = c(rep(0,10), 1, rep(0,5))
low_M = c(rep(0,11), 1, rep(0,4))
low_M_M = c(rep(0,12), 1, rep(0,3))
low_S = c(rep(0,13), 1, rep(0,2))
low_S_L = c(rep(0,14), 1, rep(0,1))
low_S_S = c(rep(0,15), 1)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Contrast the unconnected ecosystem sizes (L, M) against small (S)
# within each disturbance level; round the output columns and attach
# significance stars. p between 0.05 and 0.1 deliberately gets no marker.
contrasts = contrast(emmeans_output,
                     method = list("high L - S" = high_L - high_S,
                                   "high M - S" = high_M - high_S,
                                   "low L - S" = low_L - low_S,
                                   "low M - S" = low_M - low_S)) %>%
  as.data.frame() %>%
  mutate(estimate = round(estimate, digits = n_of_digits),
         SE = round(SE, digits = n_of_digits),
         df = round(df, digits = n_of_digits),
         t.ratio = round(t.ratio, digits = n_of_digits),
         p.value = round(p.value, digits = n_of_digits),
         e = case_when(is.na(p.value) ~ NA_character_,
                       p.value < 0.001 ~ "***",
                       p.value < 0.01 ~ "**",
                       p.value < 0.05 ~ "*",
                       TRUE ~ "")) %>%
  rename(" " = e)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Set parameters
n_of_digits = 3
# Contrast each connected ecosystem against its unconnected counterpart,
# and the two members of asymmetric pairs against each other, within each
# disturbance level; round the output and attach significance stars
# (no "." marker is produced for 0.05 < p <= 0.1).
contrasts = contrast(emmeans_output,
                     method = list("high S_L - S" = high_S_L - high_S,
                                   "high S_L - S_S" = high_S_L - high_S_S,
                                   "high S_S - S" = high_S_S - high_S,
                                   "high M_M - M" = high_M_M - high_M,
                                   "high L_S - L" = high_L_S - high_L,
                                   "high L_S - L_L" = high_L_S - high_L_L,
                                   "high L_L - L" = high_L_L - high_L,
                                   "low S_L - S" = low_S_L - low_S,
                                   "low S_L - S_S" = low_S_L - low_S_S,
                                   "low S_S - S" = low_S_S - low_S,
                                   "low M_M - M" = low_M_M - low_M,
                                   "low L_S - L" = low_L_S - low_L,
                                   "low L_S - L_L" = low_L_S - low_L_L,
                                   "low L_L - L" = low_L_L - low_L)) %>%
  as.data.frame() %>%
  mutate(estimate = round(estimate, digits = n_of_digits),
         SE = round(SE, digits = n_of_digits),
         df = round(df, digits = n_of_digits),
         t.ratio = round(t.ratio, digits = n_of_digits),
         p.value = round(p.value, digits = n_of_digits),
         e = case_when(is.na(p.value) ~ NA_character_,
                       p.value < 0.001 ~ "***",
                       p.value < 0.01 ~ "**",
                       p.value < 0.05 ~ "*",
                       TRUE ~ "")) %>%
  rename(" " = e)
# --- SHOW ECOSYSTEM TYPE CONSTRASTS --- #
contrasts
## contrast estimate SE df t.ratio p.value
## 1 high S_L - S 687.553 251.380 167.153 2.735 0.007 **
## 2 high S_L - S_S 723.004 193.148 132.515 3.743 0.000 ***
## 3 high S_S - S -35.451 229.229 185.647 -0.155 0.877
## 4 high M_M - M -93.928 189.642 125.539 -0.495 0.621
## 5 high L_S - L 285.749 218.980 125.539 1.305 0.194
## 6 high L_S - L_L -254.951 189.642 125.539 -1.344 0.181
## 7 high L_L - L 540.699 189.642 125.539 2.851 0.005 **
## 8 low S_L - S 279.243 220.580 128.428 1.266 0.208
## 9 low S_L - S_S 139.105 189.642 125.539 0.734 0.465
## 10 low S_S - S 140.138 191.487 129.392 0.732 0.466
## 11 low M_M - M -120.492 189.642 125.539 -0.635 0.526
## 12 low L_S - L 66.693 218.980 125.539 0.305 0.761
## 13 low L_S - L_L -53.114 189.642 125.539 -0.280 0.780
## 14 low L_L - L 119.807 189.642 125.539 0.632 0.529
# --- PLOT MODEL RESIDUALS --- #
# DHARMa scaled-residual diagnostics: 1000 parametric simulations from
# the fitted model are compared against the observed residuals.
# Simulate residuals from the fitted model
simulationOutput <- simulateResiduals(fittedModel = model,
n = 1000)
# Test for overdispersion in the model to check if variance exceeds mean
# (dispersion ~ 1 with a large p-value indicates no evidence of over- or
# underdispersion).
testDispersion(simulationOutput)
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 1.0018, p-value = 0.96
## alternative hypothesis: two.sided
# Plot Q-Q and Residuals vs Fitted plots
plot(simulationOutput)
response_variable_selected = "auto_hetero_ratio"
# --- ORIGINAL DATA - PLOT MEAN ± 95% CI --- #
# Two-panel figure of the autotroph/heterotroph ratio over time, split by
# disturbance level; the top panel's x axis is suppressed (shared below).
# NOTE(review): the qt() NaN warning suggests some mean±CI groups have
# N = 1 (CI undefined) — confirm in plot.all.patches.points.
p1 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
p2 = plot.all.patches.points(data = ds_ecosystems %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
## Warning: Removed 2 rows containing non-finite outside the scale range
## (`stat_summary()`).
## Warning: Removed 2 rows containing non-finite outside the scale range
## (`stat_summary()`).
# --- PREPARE DATA FOR ANALYSIS --- #
# Add baselines: the response value at the baseline time point of each
# culture, joined back to all of that culture's observations so it can be
# used as a grouping term in the model formula below.
baselines = ds_ecosystems %>%
  filter(time_point == time_point_of_baselines) %>%
  select(culture_ID,
         all_of(response_variable_selected)) %>%
  rename(baseline = all_of(response_variable_selected))
data_for_analysis = ds_ecosystems %>%
  # Join key made explicit: the original implicit natural join silently
  # depended on `culture_ID` being the only column name the two frames share.
  left_join(baselines, by = "culture_ID")
# Filter data and change level names
data_for_analysis = data_for_analysis %>%
  # Keep only modelled time points with a finite, non-missing response.
  filter(time_point %in% time_points_model,
         #!is.na(water_addition_ml),
         !is.na(!!sym(response_variable_selected)),
         !is.infinite(!!sym(response_variable_selected))) %>%
  # Shorten ecosystem-type labels (e.g. "Small connected to large" -> "S_L")
  # so model coefficients and contrast names stay compact.
  mutate(ecosystem_type = case_when(ecosystem_type == "Small unconnected" ~ "S",
                                    ecosystem_type == "Medium unconnected" ~ "M",
                                    ecosystem_type == "Large unconnected" ~ "L",
                                    ecosystem_type == "Small connected to small" ~ "S_S",
                                    ecosystem_type == "Small connected to large" ~ "S_L",
                                    ecosystem_type == "Medium connected to medium" ~ "M_M",
                                    ecosystem_type == "Large connected to large" ~ "L_L",
                                    ecosystem_type == "Large connected to small" ~ "L_S",
                                    TRUE ~ ecosystem_type)) %>%
  ungroup()
# --- DATA FOR ANALYSIS - PLOT MEAN ± 95% CI --- #
# Same two-panel figure as above, but on the filtered/relabelled analysis
# data, as a visual check that filtering did not distort the patterns.
p1 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "high"),
response_variable_selected) +
annotate("text",
x = Inf,
y = Inf,
label = "High disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") + # Adjust position and style
theme(axis.title.x = element_blank(), # Remove x-axis title
axis.text.x = element_blank()) # Remove x-axis text
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
p2 = plot.all.patches.points(data = data_for_analysis %>%
filter(disturbance == "low"),
response_variable_selected) +
theme(legend.position = "none") +
annotate("text",
x = Inf,
y = Inf,
label = "Low disturbance",
hjust = 1.1,
vjust = 1.1,
size = 5,
fontface = "bold") # Adjust position and style
# Arrange the plots vertically
ggarrange(p1,
p2,
ncol = 1,
nrow = 2,
heights = c(1, 0.7))
# --- MODEL - CONSTRUCT --- #
# Define formula
# Fixed effects: full three-way interaction of ecosystem type, disturbance
# and day. Random effects: day slope within culture identity and within the
# baseline response value.
# NOTE(review): the name `formula` shadows base::formula in this session.
formula = paste("get(response_variable_selected) ~",
"ecosystem_type * disturbance * day +",
"(day | culture_ID) +",
"(day | baseline)") %>%
print()
## [1] "get(response_variable_selected) ~ ecosystem_type * disturbance * day + (day | culture_ID) + (day | baseline)"
# Construct model
# try.different.optimizers.ecos() refits with several lmer optimizers until
# one converges; the message below records which optimizer succeeded.
model = try.different.optimizers.ecos(data_for_analysis,
formula)
## [1] "Model successfully fitted with optimizer: bobyqa "
# --- MODEL - SUMMARY --- #
# Print the fitted mixed-model summary (1 significant digit for readability).
# NOTE(review): the output reports a boundary (singular) fit — random-effect
# correlations are at -1.00 and the culture_ID variance is ~0, so a simpler
# random-effects structure may be warranted; confirm with isSingular().
print(summary(model), digits = 1)
## Linear mixed model fit by maximum likelihood . t-tests use Satterthwaite's
## method [lmerModLmerTest]
## Formula: formula
## Data: data
## Control: lmerControl(optimizer = opt$optimizer)
##
## AIC BIC logLik deviance df.resid
## -447.3 -273.4 262.7 -525.3 600
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -3.5 -0.5 -0.1 0.3 8.0
##
## Random effects:
## Groups Name Variance Std.Dev. Corr
## culture_ID (Intercept) 1e-09 3e-05
## day 6e-12 2e-06 -1.00
## baseline (Intercept) 1e-02 1e-01
## day 9e-05 9e-03 -1.00
## Residual 2e-02 1e-01
## Number of obs: 639, groups: culture_ID, 109; baseline, 104
##
## Fixed effects:
## Estimate Std. Error df t value
## (Intercept) -2e-01 9e-02 2e+02 -1.7
## ecosystem_typeL_L 2e-01 1e-01 2e+02 1.9
## ecosystem_typeL_S 1e-01 1e-01 2e+02 0.9
## ecosystem_typeM 2e-01 1e-01 2e+02 1.7
## ecosystem_typeM_M 1e-01 1e-01 2e+02 1.2
## ecosystem_typeS 3e-01 1e-01 2e+02 2.2
## ecosystem_typeS_L 4e-01 1e-01 2e+02 2.9
## ecosystem_typeS_S 3e-01 1e-01 2e+02 3.0
## disturbancelow 4e-01 1e-01 2e+02 3.1
## day 2e-02 6e-03 1e+02 4.1
## ecosystem_typeL_L:disturbancelow -4e-01 2e-01 2e+02 -2.6
## ecosystem_typeL_S:disturbancelow -3e-01 2e-01 2e+02 -1.8
## ecosystem_typeM:disturbancelow -4e-01 2e-01 2e+02 -2.4
## ecosystem_typeM_M:disturbancelow -4e-01 2e-01 2e+02 -2.4
## ecosystem_typeS:disturbancelow -5e-01 2e-01 2e+02 -2.7
## ecosystem_typeS_L:disturbancelow -6e-01 2e-01 2e+02 -3.1
## ecosystem_typeS_S:disturbancelow -6e-01 2e-01 2e+02 -3.7
## ecosystem_typeL_L:day -2e-02 7e-03 1e+02 -2.7
## ecosystem_typeL_S:day -8e-03 8e-03 1e+02 -1.0
## ecosystem_typeM:day -2e-02 8e-03 1e+02 -2.7
## ecosystem_typeM_M:day -1e-02 7e-03 1e+02 -1.9
## ecosystem_typeS:day -3e-02 9e-03 1e+02 -3.3
## ecosystem_typeS_L:day -3e-02 8e-03 2e+02 -3.9
## ecosystem_typeS_S:day -3e-02 7e-03 1e+02 -4.5
## disturbancelow:day -3e-02 8e-03 1e+02 -3.5
## ecosystem_typeL_L:disturbancelow:day 3e-02 1e-02 1e+02 3.1
## ecosystem_typeL_S:disturbancelow:day 2e-02 1e-02 1e+02 1.7
## ecosystem_typeM:disturbancelow:day 4e-02 1e-02 1e+02 3.1
## ecosystem_typeM_M:disturbancelow:day 3e-02 1e-02 1e+02 2.7
## ecosystem_typeS:disturbancelow:day 4e-02 1e-02 1e+02 3.2
## ecosystem_typeS_L:disturbancelow:day 4e-02 1e-02 2e+02 3.3
## ecosystem_typeS_S:disturbancelow:day 4e-02 1e-02 1e+02 4.2
## Pr(>|t|)
## (Intercept) 0.098 .
## ecosystem_typeL_L 0.056 .
## ecosystem_typeL_S 0.375
## ecosystem_typeM 0.095 .
## ecosystem_typeM_M 0.228
## ecosystem_typeS 0.030 *
## ecosystem_typeS_L 0.004 **
## ecosystem_typeS_S 0.003 **
## disturbancelow 0.002 **
## day 7e-05 ***
## ecosystem_typeL_L:disturbancelow 0.009 **
## ecosystem_typeL_S:disturbancelow 0.070 .
## ecosystem_typeM:disturbancelow 0.016 *
## ecosystem_typeM_M:disturbancelow 0.016 *
## ecosystem_typeS:disturbancelow 0.008 **
## ecosystem_typeS_L:disturbancelow 0.002 **
## ecosystem_typeS_S:disturbancelow 2e-04 ***
## ecosystem_typeL_L:day 0.007 **
## ecosystem_typeL_S:day 0.298
## ecosystem_typeM:day 0.008 **
## ecosystem_typeM_M:day 0.066 .
## ecosystem_typeS:day 0.001 **
## ecosystem_typeS_L:day 1e-04 ***
## ecosystem_typeS_S:day 2e-05 ***
## disturbancelow:day 6e-04 ***
## ecosystem_typeL_L:disturbancelow:day 0.002 **
## ecosystem_typeL_S:disturbancelow:day 0.093 .
## ecosystem_typeM:disturbancelow:day 0.003 **
## ecosystem_typeM_M:disturbancelow:day 0.008 **
## ecosystem_typeS:disturbancelow:day 0.002 **
## ecosystem_typeS_L:disturbancelow:day 0.001 **
## ecosystem_typeS_S:disturbancelow:day 4e-05 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (bobyqa) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# --- MODEL - ANOVA --- #
# Type III Wald chi-square tests for the fixed effects; all main effects and
# interactions are significant at alpha = 0.05 in the output below.
car::Anova(model, type = "III")
## Analysis of Deviance Table (Type III Wald chisquare tests)
##
## Response: get(response_variable_selected)
## Chisq Df Pr(>Chisq)
## (Intercept) 2.7670 1 0.096224 .
## ecosystem_type 15.6134 7 0.028893 *
## disturbance 9.6489 1 0.001895 **
## day 17.0660 1 3.610e-05 ***
## ecosystem_type:disturbance 16.4222 7 0.021527 *
## ecosystem_type:day 32.9062 7 2.756e-05 ***
## disturbance:day 12.3798 1 0.000434 ***
## ecosystem_type:disturbance:day 22.2662 7 0.002285 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# --- ESTIMATE MARGINAL MEANS --- #
# Estimated marginal means for every ecosystem_type x disturbance cell,
# evaluated at the mean day (17.8 in the output), with Satterthwaite df and
# Sidak-adjusted 95% CIs (16 estimates). These EMMs feed the custom
# contrasts defined below.
emmeans_output = emmeans(model,
specs = ~ ecosystem_type * disturbance * day,
adjust = "sidak",
bias.adj = TRUE,
lmer.df = "satterthwaite")
emmeans_output
## ecosystem_type disturbance day emmean SE df lower.CL upper.CL
## L high 17.8 0.2681 0.0349 135 0.16348 0.373
## L_L high 17.8 0.1483 0.0238 182 0.07714 0.219
## L_S high 17.8 0.2331 0.0349 135 0.12846 0.338
## M high 17.8 0.0943 0.0349 135 -0.01038 0.199
## M_M high 17.8 0.1727 0.0247 135 0.09868 0.247
## S high 17.8 0.0432 0.0439 202 -0.08773 0.174
## S_L high 17.8 0.1013 0.0330 251 0.00313 0.199
## S_S high 17.8 0.0389 0.0255 196 -0.03734 0.115
## L low 17.8 0.1648 0.0349 135 0.06019 0.269
## L_L low 17.8 0.1655 0.0238 182 0.09432 0.237
## L_S low 17.8 0.1403 0.0349 135 0.03565 0.245
## M low 17.8 0.1682 0.0349 135 0.06355 0.273
## M_M low 17.8 0.1581 0.0238 182 0.08700 0.229
## S low 17.8 0.1202 0.0354 141 0.01408 0.226
## S_L low 17.8 0.0773 0.0327 236 -0.01993 0.175
## S_S low 17.8 0.0918 0.0247 135 0.01782 0.166
##
## Degrees-of-freedom method: satterthwaite
## Results are given on the get (not the response) scale.
## Confidence level used: 0.95
## Conf-level adjustment: sidak method for 16 estimates
# --- CODE EACH LEVEL OF DISTURBANCE AND ECOSYSTEM TYPE TO THEN PRODUCE CONTRASTS --- #
# Each object below is a length-16 indicator (unit) vector selecting one
# disturbance x ecosystem-type cell, in the row order of `emmeans_output`
# printed above (high: L, L_L, L_S, M, M_M, S, S_L, S_S; then low in the
# same ecosystem-type order). Differences of these vectors define custom
# contrasts for emmeans::contrast().
make_indicator = function(position, n_cells = 16) {
  indicator = rep(0, n_cells)
  indicator[position] = 1
  indicator
}
high_L   = make_indicator(1)
high_L_L = make_indicator(2)
high_L_S = make_indicator(3)
high_M   = make_indicator(4)
high_M_M = make_indicator(5)
high_S   = make_indicator(6)
high_S_L = make_indicator(7)
high_S_S = make_indicator(8)
low_L    = make_indicator(9)
low_L_L  = make_indicator(10)
low_L_S  = make_indicator(11)
low_M    = make_indicator(12)
low_M_M  = make_indicator(13)
low_S    = make_indicator(14)
low_S_L  = make_indicator(15)
low_S_S  = make_indicator(16)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Contrasts among UNCONNECTED size classes (L vs S, M vs S) within each
# disturbance level.
# NOTE(review): `contrasts` is reassigned by the next contrast chunk before
# it is printed, so this table is never displayed here — presumably shown
# elsewhere; verify this is intentional.
# Set parameters
n_of_digits = 3
# Compute contrasts
contrasts = contrast(emmeans_output,
                     method = list("high L - S" = high_L - high_S,
                                   "high M - S" = high_M - high_S,
                                   "low L - S" = low_L - low_S,
                                   "low M - S" = low_M - low_S)) %>%
  as.data.frame() %>%
  mutate(p.value = round(p.value, digits = n_of_digits),
         estimate = round(estimate, digits = n_of_digits),
         SE = round(SE, digits = n_of_digits),
         df = round(df, digits = n_of_digits),
         t.ratio = round(t.ratio, digits = n_of_digits),
         # Significance stars. The original cascade of ifelse() calls had a
         # dead branch (`p.value > 0.1` re-assigning the empty string that
         # `e` already held); a single case_when() expresses the same
         # mapping directly.
         e = case_when(p.value < 0.001 ~ "***",
                       p.value < 0.01 ~ "**",
                       p.value < 0.05 ~ "*",
                       TRUE ~ "")) %>%
  rename(" " = e)
# --- PRODUCE ECOSYSTEM TYPE CONTRASTS --- #
# Contrasts comparing each CONNECTED ecosystem type with its unconnected
# counterpart (and asymmetric with symmetric connections), within each
# disturbance level.
# Set parameters
n_of_digits = 3
# Compute contrasts
contrasts = contrast(emmeans_output,
                     method = list("high S_L - S" = high_S_L - high_S,
                                   "high S_L - S_S" = high_S_L - high_S_S,
                                   "high S_S - S" = high_S_S - high_S,
                                   "high M_M - M" = high_M_M - high_M,
                                   "high L_S - L" = high_L_S - high_L,
                                   "high L_S - L_L" = high_L_S - high_L_L,
                                   "high L_L - L" = high_L_L - high_L,
                                   "low S_L - S" = low_S_L - low_S,
                                   "low S_L - S_S" = low_S_L - low_S_S,
                                   "low S_S - S" = low_S_S - low_S,
                                   "low M_M - M" = low_M_M - low_M,
                                   "low L_S - L" = low_L_S - low_L,
                                   "low L_S - L_L" = low_L_S - low_L_L,
                                   "low L_L - L" = low_L_L - low_L)) %>%
  as.data.frame() %>%
  mutate(p.value = round(p.value, digits = n_of_digits),
         estimate = round(estimate, digits = n_of_digits),
         SE = round(SE, digits = n_of_digits),
         df = round(df, digits = n_of_digits),
         t.ratio = round(t.ratio, digits = n_of_digits),
         # Significance stars. The original cascade of ifelse() calls had a
         # dead branch (`p.value > 0.1` re-assigning the empty string that
         # `e` already held); a single case_when() expresses the same
         # mapping directly.
         e = case_when(p.value < 0.001 ~ "***",
                       p.value < 0.01 ~ "**",
                       p.value < 0.05 ~ "*",
                       TRUE ~ "")) %>%
  rename(" " = e)
# --- SHOW ECOSYSTEM TYPE CONTRASTS --- #
# Print the connectivity contrasts for the autotroph/heterotroph ratio.
contrasts
## contrast estimate SE df t.ratio p.value
## 1 high S_L - S 0.058 0.055 219.633 1.058 0.291
## 2 high S_L - S_S 0.062 0.041 252.656 1.513 0.131
## 3 high S_S - S -0.004 0.051 200.761 -0.085 0.933
## 4 high M_M - M 0.078 0.043 134.931 1.836 0.069
## 5 high L_S - L -0.035 0.049 134.931 -0.710 0.479
## 6 high L_S - L_L 0.085 0.042 148.167 2.009 0.046 *
## 7 high L_L - L -0.120 0.042 148.167 -2.838 0.005 **
## 8 low S_L - S -0.043 0.048 178.563 -0.889 0.375
## 9 low S_L - S_S -0.014 0.041 192.327 -0.354 0.724
## 10 low S_S - S -0.028 0.043 138.966 -0.658 0.512
## 11 low M_M - M -0.010 0.042 148.167 -0.238 0.812
## 12 low L_S - L -0.025 0.049 134.931 -0.498 0.619
## 13 low L_S - L_L -0.025 0.042 148.167 -0.596 0.552
## 14 low L_L - L 0.001 0.042 148.167 0.015 0.988
# --- PLOT MODEL RESIDUALS --- #
# Simulate residuals from the fitted model
# (DHARMa scaled quantile residuals, 1000 simulations)
simulationOutput <- simulateResiduals(fittedModel = model,
n = 1000)
# Test for overdispersion in the model to check if variance exceeds mean
# (output below: dispersion = 0.99756, p = 0.998 — no dispersion problems)
testDispersion(simulationOutput)
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 0.99756, p-value = 0.998
## alternative hypothesis: two.sided
# Plot Q-Q and Residuals vs Fitted plots
plot(simulationOutput)
# --- PLOT BIOMASS-SPECIES RICHNESS RELATIONSHIP --- #
# Scatter plot of biomass density against species richness, dropping
# observations for which richness was not measured. The x-axis is bounded by
# the number of protist species in the species pool.
richness_axis_label = axis_names$axis_name[axis_names$variable == "species_richness"]
bioarea_axis_label = axis_names$axis_name[axis_names$variable == "bioarea_mm2_per_ml"]
ds_ecosystems %>%
  filter(!is.na(species_richness)) %>%
  ggplot(aes(x = species_richness, y = bioarea_mm2_per_ml)) +
  geom_point() +
  xlim(0, length(protist_species)) +
  labs(x = richness_axis_label,
       y = bioarea_axis_label) +
  theme_bw() +
  theme(panel.grid.major = element_blank(),
        panel.grid.minor = element_blank(),
        legend.position = legend_position,
        legend.key.width = unit(legend_width_cm, "cm"))
# --- PLOT BIOMASS-SHANNON RELATIONSHIP --- #
# Scatter plot of biomass density against Shannon diversity, dropping
# observations without a Shannon value.
shannon_axis_label = axis_names$axis_name[axis_names$variable == "shannon"]
bioarea_y_label = axis_names$axis_name[axis_names$variable == "bioarea_mm2_per_ml"]
ds_ecosystems %>%
  filter(!is.na(shannon)) %>%
  ggplot(aes(x = shannon, y = bioarea_mm2_per_ml)) +
  geom_point() +
  labs(x = shannon_axis_label,
       y = bioarea_y_label) +
  theme_bw() +
  theme(panel.grid.major = element_blank(),
        panel.grid.minor = element_blank(),
        legend.position = legend_position,
        legend.key.width = unit(legend_width_cm, "cm"))
# --- PLOT BIOMASS-EVENNESS RELATIONSHIP --- #
# Scatter plot of biomass density against Pielou's evenness, dropping
# observations without an evenness value.
evenness_axis_label = axis_names$axis_name[axis_names$variable == "evenness_pielou"]
bioarea_label_y = axis_names$axis_name[axis_names$variable == "bioarea_mm2_per_ml"]
ds_ecosystems %>%
  filter(!is.na(evenness_pielou)) %>%
  ggplot(aes(x = evenness_pielou, y = bioarea_mm2_per_ml)) +
  geom_point() +
  labs(x = evenness_axis_label,
       y = bioarea_label_y) +
  theme_bw() +
  theme(panel.grid.major = element_blank(),
        panel.grid.minor = element_blank(),
        legend.position = legend_position,
        legend.key.width = unit(legend_width_cm, "cm"))
# Disturbance level shown in the following figure chunks.
disturbance_selected = "high"
# --- PREPARE DATA FOR PLOTTING --- #
# Define what you want to plot
# (the two meta-ecosystem types compared in the main figure:
# symmetric Medium-Medium vs asymmetric Small-Large)
metaecosystem_type_selected = c("Medium-Medium",
"Small-Large")
# Filter dataset
data_for_plotting = ds_metaecosystems %>%
filter(metaecosystem_type %in% metaecosystem_type_selected,
disturbance == disturbance_selected)
# Write function to plot a response variable. Afterwards you can use this function to plot alpha, beta, gamma diversity, and biomass.
# Plots mean ± 95% CI of `response_variable_selected` over time for the
# meta-ecosystems in the captured global `data_for_plotting`, coloured by
# size symmetry and line-typed by connection status. Vertical dashed lines
# mark resource-flow days; a grey rectangle marks the background period.
# Relies on globals: data_for_plotting, metaecosystem_type_selected,
# summarySE, axis_names, and the plotting constants (dodging, colours, etc.).
# Returns a ggplot object.
plot.single.plot = function(response_variable_selected){
data_for_plotting %>%
filter(metaecosystem_type %in% metaecosystem_type_selected,
!is.na(!!sym(response_variable_selected))) %>%
summarySE(measurevar = response_variable_selected,
groupvars = c("day", "ecosystem_size_symmetry", "connection")) %>%
ggplot(aes(x = day,
y = get(response_variable_selected),
group = interaction(day, ecosystem_size_symmetry, connection),
color = ecosystem_size_symmetry,
linetype = connection)) +
geom_point(stat = "summary",
fun = "mean",
position = position_dodge(dodging),
size = treatment_points_size) +
geom_line(stat = "summary",
fun = "mean",
aes(group = interaction(ecosystem_size_symmetry, connection)),
position = position_dodge(dodging),
linewidth = treatment_lines_linewidth) +
# Error bars use the `ci` column produced by summarySE()
geom_errorbar(aes(ymax = get(response_variable_selected) + ci,
ymin = get(response_variable_selected) - ci),
width = width_errorbar,
position = position_dodge(dodging)) +
labs(x = axis_names$axis_name[axis_names$variable == "day"],
y = axis_names$axis_name[axis_names$variable == response_variable_selected],
color = "") +
scale_color_manual(values = treatment_colours_paper) +
scale_linetype_manual(values = treatment_linetype_paper) +
# Mark the days on which resource flows were performed
geom_vline(xintercept = resource_flow_days,
linetype = resource_flow_line_type,
color = resource_flow_line_colour,
linewidth = resource_flow_line_width) +
theme_bw() +
theme(panel.grid.major = element_blank(),
panel.grid.minor = element_blank(),
legend.position = legend_position,
legend.key.width = unit(legend_width_cm, "cm")) +
guides(color = guide_legend(title = NULL,
nrow = 2),
linetype = guide_legend(title = NULL,
nrow = 2)) +
theme(plot.margin = unit(c(ggarrange_margin_left,
ggarrange_margin_right,
ggarrange_margin_bottom,
ggarrange_margin_left),
"cm")) +
# Shade the pre-treatment background period
geom_rect(xmin = grey_background_xmin,
xmax = grey_background_xmax,
ymin = grey_background_ymin,
ymax = grey_background_ymax,
fill = grey_background_fill,
alpha = grey_background_alpha,
color = grey_background_color)
}
# Combine plots of alpha, beta, gamma biodiversity and biomass.
# Four stacked panels sharing one legend; only the bottom panel keeps its
# x-axis. Panels: (a) mean Shannon (alpha), (b) Bray-Curtis (beta),
# (c) meta-ecosystem richness (gamma), (d) total meta-ecosystem bioarea.
p_combined = ggarrange(plot.single.plot("mean_shannon") +
rremove("xlab") +
theme(axis.text.x = element_blank(),
axis.ticks.x = element_blank()) +
font("legend.text", size = paper_labels_size) +
font("ylab", size = paper_labels_size),
plot.single.plot("bray_curtis") +
rremove("xlab") +
theme(axis.text.x = element_blank(),
axis.ticks.x = element_blank()) +
font("legend.text",
size = paper_labels_size) +
font("ylab",
size = paper_labels_size),
plot.single.plot("metaecosystem_richness") +
rremove("xlab") +
theme(axis.text.x = element_blank(),
axis.ticks.x = element_blank()) +
font("legend.text",
size = paper_labels_size) +
font("ylab",
size = paper_labels_size),
plot.single.plot("total_metaecosystem_bioarea_mm2") +
font("legend.text",
size = paper_labels_size) +
font("xlab",
size = paper_labels_size) +
font("ylab",
size = paper_labels_size) +
scale_x_continuous(breaks = unique(data_for_plotting$day)),
heights = c(0.8, 0.8, 0.8, 1),
nrow = 4,
common.legend = TRUE,
align = "v",
labels = c("(a)", "(b)", "(c)", "(d)"),
label.x = 0.1,
label.y = 0.8) %>%
print()
# --- PREPARE DATA FOR PLOTTING --- #
# Define parameters
# Ecosystem types involved in the small/large comparison: each size class
# unconnected, connected to its own size, or connected to the other size.
ecosystem_type_selected = c("Small connected to large",
"Small connected to small",
"Small unconnected",
"Large connected to small",
"Large connected to large",
"Large unconnected")
# Filter data
# (disturbance_selected is set in an earlier chunk)
data_for_plotting = ds_ecosystems %>%
filter(ecosystem_type %in% ecosystem_type_selected,
disturbance == disturbance_selected)
# --- CONSTRUCT FUNCTION TO PLOT BIOMASS/SHANNON OF SMALL AND LARGE ECOSYSTEMS --- #
# Redefines plot.single.plot for individual-ecosystem panels: mean ± 95% CI
# of the response over time, one colour per size class (brown = small,
# blue = large) and one linetype per connection status.
# NOTE(review): the manual colour/linetype values are paired with legend
# labels (S[L], S[S], S, L[S], L[L], L) by the factor level order of
# ecosystem_type — presumably alphabetical; verify the pairing is correct.
plot.single.plot = function(response_variable_selected){
data_for_plotting %>%
filter(!is.na(!!sym(response_variable_selected))) %>%
summarySE(measurevar = response_variable_selected,
groupvars = c("day", "time_point", "ecosystem_type", "ecosystem_size", "connection")) %>%
ggplot(aes(x = day,
y = get(response_variable_selected),
group = interaction(day, ecosystem_type),
color = ecosystem_type,
linetype = ecosystem_type)) +
geom_point(stat = "summary",
fun = "mean",
position = position_dodge(dodging),
size = treatment_points_size) +
geom_line(stat = "summary",
fun = "mean",
aes(group = ecosystem_type),
position = position_dodge(dodging),
linewidth = treatment_lines_linewidth) +
# Error bars use the `ci` column produced by summarySE()
geom_errorbar(aes(ymax = get(response_variable_selected) + ci,
ymin = get(response_variable_selected) - ci),
width = width_errorbar,
position = position_dodge(dodging)) +
labs(x = axis_names$axis_name[axis_names$variable == "day"],
y = axis_names$axis_name[axis_names$variable == response_variable_selected],
color = "") +
scale_color_manual(values = c("#993404",
"#993404",
"#993404",
"#3182bd",
"#3182bd",
"#3182bd"),
label = expression(S[L],
S[S],
S,
L[S],
L[L],
L)) +
scale_linetype_manual(values = c("solid",
"dashed",
"dotted",
"solid",
"dashed",
"dotted"),
label = expression(S[L],
S[S],
S,
L[S],
L[L],
L)) +
# Mark resource-flow days
geom_vline(xintercept = resource_flow_days,
linetype = resource_flow_line_type,
color = resource_flow_line_colour,
linewidth = resource_flow_line_width) +
geom_hline(yintercept = 0,
color = zero_line_colour,
linetype = zero_line_line_type,
linewidth = zero_line_line_width) +
theme_bw() +
theme(panel.grid.major = element_blank(),
panel.grid.minor = element_blank(),
legend.position = legend_position,
legend.key.width = unit(legend_width_cm, "cm")) +
guides(color = guide_legend(title = NULL,
nrow = 3),
linetype = guide_legend(title = NULL,
nrow = 3)) +
# Shade the pre-treatment background period
geom_rect(xmin = grey_background_xmin,
xmax = grey_background_xmax,
ymin = grey_background_ymin,
ymax = grey_background_ymax,
fill = grey_background_fill,
alpha = grey_background_alpha,
color = grey_background_color)
}
# --- COMBINE PLOTS OF SHANNON AND BIOMASS --- #
# Two stacked panels sharing one legend: (a) Shannon, (b) biomass density.
# NOTE(review): `heights` supplies 3 values for nrow = 2 — presumably a
# leftover from a 3-panel layout; confirm the intended panel proportions.
p_combined = ggarrange(plot.single.plot("shannon") +
rremove("xlab") +
theme(axis.text.x = element_blank(),
axis.ticks.x = element_blank()) +
font("legend.text",
size = paper_labels_size) +
font("ylab",
size = paper_labels_size),
plot.single.plot("bioarea_mm2_per_ml") +
font("legend.text",
size = paper_labels_size) +
font("xlab",
size = paper_labels_size) +
font("ylab",
size = paper_labels_size) +
scale_x_continuous(breaks = unique(data_for_plotting$day)),
heights = c(0.8, 0.8, 1),
nrow = 2,
align = "v",
labels = c("(a)", "(b)"),
label.x = 0.1,
label.y = 0.8,
common.legend = TRUE) %>%
print()
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
# --- PREPARE DATA FOR PLOTTING --- #
# Define parameters
# Medium ecosystems only: connected pair vs unconnected control.
ecosystem_type_selected = c("Medium connected to medium",
"Medium unconnected")
# Filter data
# NOTE(review): this filter drops rows where the GLOBAL
# `response_variable_selected` (set to "auto_hetero_ratio" in an earlier
# chunk) is NA, not the variables plotted below (shannon, bioarea) —
# confirm this is intentional.
data_for_plotting = ds_ecosystems %>%
filter(ecosystem_type %in% ecosystem_type_selected,
disturbance == disturbance_selected,
!is.na(!!sym(response_variable_selected)))
# --- CONSTRUCT FUNCTION TO PLOT BIOMASS/SHANNON OF MEDIUM ECOSYSTEMS --- #
# Redefines plot.single.plot for the medium-ecosystem comparison: mean ±
# 95% CI over time, one colour (orange) for both types, linetype
# distinguishing connected (M[M]) from unconnected (M).
plot.single.plot = function(response_variable_selected){
data_for_plotting %>%
summarySE(measurevar = response_variable_selected,
groupvars = c("day", "time_point", "ecosystem_type", "ecosystem_size", "connection")) %>%
ggplot(aes(x = day,
y = get(response_variable_selected),
group = interaction(day, ecosystem_type),
color = ecosystem_type,
linetype = ecosystem_type)) +
geom_point(stat = "summary",
fun = "mean",
position = position_dodge(dodging),
size = treatment_points_size) +
geom_line(stat = "summary",
fun = "mean",
aes(group = ecosystem_type),
position = position_dodge(dodging),
linewidth = treatment_lines_linewidth) +
# Error bars use the `ci` column produced by summarySE()
geom_errorbar(aes(ymax = get(response_variable_selected) + ci,
ymin = get(response_variable_selected) - ci),
width = width_errorbar,
position = position_dodge(dodging)) +
labs(x = axis_names$axis_name[axis_names$variable == "day"],
y = axis_names$axis_name[axis_names$variable == response_variable_selected],
color = "") +
scale_color_manual(values = c("#d95f0e",
"#d95f0e"),
label = expression(M[M],
M)) +
scale_linetype_manual(values = c("dashed",
"dotted"),
label = expression(M[M],
M)) +
# Mark resource-flow days
geom_vline(xintercept = resource_flow_days,
linetype = resource_flow_line_type,
color = resource_flow_line_colour,
linewidth = resource_flow_line_width) +
geom_hline(yintercept = 0,
color = zero_line_colour,
linetype = zero_line_line_type,
linewidth = zero_line_line_width) +
theme_bw() +
theme(panel.grid.major = element_blank(),
panel.grid.minor = element_blank(),
legend.position = legend_position,
legend.key.width = unit(legend_width_cm, "cm")) +
guides(color = guide_legend(title = NULL,
nrow = 3),
linetype = guide_legend(title = NULL,
nrow = 3)) +
# Shade the pre-treatment background period
geom_rect(xmin = grey_background_xmin,
xmax = grey_background_xmax,
ymin = grey_background_ymin,
ymax = grey_background_ymax,
fill = grey_background_fill,
alpha = grey_background_alpha,
color = grey_background_color)
}
# --- COMBINE PLOTS OF SHANNON AND BIOMASS --- #
# Two stacked panels sharing one legend: (a) Shannon, (b) biomass density.
# NOTE(review): `heights` supplies 3 values for nrow = 2 — presumably a
# leftover from a 3-panel layout; confirm the intended panel proportions.
p_combined = ggarrange(plot.single.plot("shannon") +
rremove("xlab") +
theme(axis.text.x = element_blank(),
axis.ticks.x = element_blank()) +
font("legend.text",
size = paper_labels_size) +
font("ylab",
size = paper_labels_size),
plot.single.plot("bioarea_mm2_per_ml") +
font("legend.text",
size = paper_labels_size) +
font("xlab",
size = paper_labels_size) +
font("ylab",
size = paper_labels_size) +
scale_x_continuous(breaks = unique(data_for_plotting$day)),
heights = c(0.8, 0.8, 1),
nrow = 2,
align = "v",
labels = c("(a)", "(b)"),
label.x = 0.1,
label.y = 0.8,
common.legend = TRUE) %>%
print()
# --- DEFINE RESPONSE VARIABLE YOU WANT TO PLOT --- #
response_variable = "auto_hetero_ratio"
# --- PREPARE DATA FOR PLOTTING --- #
# Define parameters
# Unconnected ecosystems only, all three size classes.
ecosystem_type_selected = c("S",
"M",
"L")
# Manipulate data
# The case_when has no TRUE fallback, so connected types become NA and are
# then removed by the %in% filter below.
data_for_plotting = ds_ecosystems %>%
mutate(ecosystem_type = case_when(ecosystem_type == "Small unconnected" ~ "S",
ecosystem_type == "Medium unconnected" ~ "M",
ecosystem_type == "Large unconnected" ~ "L")) %>%
filter(ecosystem_type %in% ecosystem_type_selected,
disturbance == disturbance_selected,
!is.na(!!sym(response_variable)))
# --- CONSTRUCT PLOT --- #
# Mean ± 95% CI of the autotroph/heterotroph ratio over time for the three
# unconnected size classes, in greyscale.
# NOTE(review): a linetype guide is configured below although no linetype
# aesthetic is mapped — presumably harmless; the "No shared levels" warnings
# echoed after this chunk suggest a scale/levels mismatch worth checking.
p = data_for_plotting %>%
# Manipulate
summarySE(measurevar = response_variable,
groupvars = c("day", "ecosystem_type", "ecosystem_size", "connection")) %>%
# Create plot
ggplot(aes(x = day,
y = get(response_variable),
group = interaction(day, ecosystem_type),
color = ecosystem_type)) +
# Points
geom_point(stat = "summary",
fun = "mean",
position = position_dodge(dodging),
size = treatment_points_size) +
geom_errorbar(aes(ymax = get(response_variable) + ci,
ymin = get(response_variable) - ci),
width = width_errorbar,
position = position_dodge(dodging)) +
# Lines
geom_line(stat = "summary",
fun = "mean",
aes(group = ecosystem_type),
position = position_dodge(dodging),
linewidth = treatment_lines_linewidth) +
# Axes and legend
labs(x = axis_names$axis_name[axis_names$variable == "day"],
y = axis_names$axis_name[axis_names$variable == response_variable],
color = "") +
scale_x_continuous(breaks = unique(data_for_plotting$day)) +
guides(color = guide_legend(title = NULL,
nrow = 1),
linetype = guide_legend(title = NULL,
nrow = 1)) +
scale_color_manual(values = c("#000000",
"#737373",
"#bdbdbd")) +
# Extra graphic elements
theme_bw() +
theme(panel.grid.major = element_blank(),
panel.grid.minor = element_blank(),
legend.position = legend_position,
legend.key.width = unit(legend_width_cm, "cm"),
axis.title.x = element_text(size = paper_labels_size),
axis.title.y = element_text(size = paper_labels_size),
legend.text = element_text(size = paper_labels_size)) +
geom_rect(xmin = grey_background_xmin,
xmax = grey_background_xmax,
ymin = grey_background_ymin,
ymax = grey_background_ymax,
fill = grey_background_fill,
alpha = grey_background_alpha,
color = grey_background_color) +
geom_hline(yintercept = 0,
color = zero_line_colour,
linetype = zero_line_line_type,
linewidth = zero_line_line_width) +
geom_vline(xintercept = resource_flow_days,
linetype = resource_flow_line_type,
color = resource_flow_line_colour,
linewidth = resource_flow_line_width)
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
p
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
# --- DEFINE PLOTTING PARAMETERS --- #
# Legend layout: number of rows used for the plot legend.
legend_row_n_input = 2
# Font size used for the axis titles below.
size_y_axis = 22
# Axis limits for the presentation plots: response value on y,
# experiment day on x.
y_min = -0.8
y_max = 3
x_min = -0.2
x_max = 30
# --- PREPARE DATA FOR PLOTTING --- #
# Ecosystem types to display. The ORDER matters: the presentation plots
# below reveal these types cumulatively, one per slide.
ecosystem_type_all = c("Small connected to large",
                       "Small unconnected",
                       "Small connected to small",
                       "Large connected to small",
                       "Large unconnected",
                       "Large connected to large")
# Response variable shown on the y axis (Shannon diversity).
response_variable_selected = "shannon"
# Keep only the selected ecosystem types at the selected disturbance level.
data_for_plotting = filter(ds_ecosystems,
                           ecosystem_type %in% ecosystem_type_all,
                           disturbance == disturbance_selected)
# --- CONSTRUCT PLOTS --- #
# Build one empty base plot (slot 1) plus one cumulative plot per
# ecosystem type (slots 2..7): plot i+1 shows the first i types, so a
# presentation can reveal the treatments one at a time.
plots = NULL
# Empty plot that only sets up the axes; no data shown yet.
plots[[1]] = plot.ecosystems.empty.presentations(y_min_alpha,
                                                 y_max_alpha,
                                                 "shannon")
# BUG FIX: the loop header previously read
#   for (i in 1:length(ecosystem_type_all)+1)
# which parses as (1:6) + 1 = 2:7 (`:` binds tighter than `+`). That
# skipped the first cumulative plot and indexed ecosystem_type_all[1:7],
# whose 7th element is NA. Iterate over 1..6 and fill slots 2..7 instead.
for (i in seq_along(ecosystem_type_all)) {
  plots[[i + 1]] = plot.ecosystems.points.presentations(data = data_for_plotting,
                                                        ecosystem_type_selected = ecosystem_type_all[1:i],
                                                        response_variable_selected) +
    theme(plot.margin = unit(c(ggarrange_margin_left,
                               ggarrange_margin_right,
                               ggarrange_margin_bottom,
                               ggarrange_margin_left),
                             "cm"),
          legend.position = "none") +
    scale_y_continuous(labels = scales::number_format(accuracy = 0.01)) +
    xlim(x_min, x_max) +
    ylim(y_min, y_max)
}
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
## Warning in qt(conf.interval/2 + 0.5, datac$N - 1): NaNs produced
# --- COMBINE PLOTS --- #
# Wrap each cumulative plot (slots 2..7) in ggarrange with enlarged axis
# and legend fonts. Slot 1 keeps the raw empty base plot unchanged.
p_combined = NULL
p_combined[[1]] = plots[[1]]
# NOTE: the original loop header `1:length(ecosystem_type_all)+1` relied
# on operator precedence to mean (1:6) + 1 = 2:7. The explicit range
# below preserves exactly that behaviour (slot 1 is skipped) while
# making the intent unambiguous.
for (i in 2:(length(ecosystem_type_all) + 1)) {
  p_combined[[i]] = ggarrange(plots[[i]] +
                                theme(axis.text = element_text(size = presentation_axes_size)) +
                                font("legend.text", size = size_legend) +
                                font("xlab", size = size_y_axis) +
                                font("ylab", size = size_y_axis),
                              nrow = 1,
                              align = "v",
                              label.x = 0.1,
                              label.y = 0.8)
}
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: Removed 1 row containing missing values or values outside the scale range
## (`geom_point()`).
## Warning: Removed 1 row containing missing values or values outside the scale range
## (`geom_line()`).
# --- SAVE COMBINED PLOTS --- #
# Write each combined presentation figure to a numbered PNG.
# NOTE: the original loop header `1:length(ecosystem_type_all)+1` parses
# as (1:6) + 1 = 2:7; the explicit range below preserves that behaviour
# (slot 1 — apparently the empty base plot — is not saved, and the file
# names run from _2 to _7).
for (i in 2:(length(ecosystem_type_all) + 1)) {
  png(file = here("3_results",
                  "figures",
                  disturbance_selected,
                  "presentations",
                  paste0("ecosystems_connected_diversity_", i, ".png")),
      width = presentation_figure_width,
      height = presentation_figure_height,
      units = presentation_figure_units,
      res = presentation_figure_res)
  print(p_combined[[i]])
  # Close the PNG device so the file is flushed to disk.
  dev.off()
}
# Disturbance level shown in the remaining figures of this section.
disturbance_selected = "low"
# --- PREPARE DATA FOR PLOTTING --- #
# Meta-ecosystem treatments to compare: symmetric ecosystem sizes
# (Medium-Medium) versus asymmetric ones (Small-Large).
metaecosystem_type_selected = c("Medium-Medium",
                                "Small-Large")
# Keep only those treatments at the selected disturbance level.
data_for_plotting = filter(ds_metaecosystems,
                           metaecosystem_type %in% metaecosystem_type_selected,
                           disturbance == disturbance_selected)
# Plot one meta-ecosystem response variable over time. Reused below to
# plot alpha, beta, gamma diversity, and biomass.
# `response_variable_selected` is the column name as a string.
plot.single.plot = function(response_variable_selected){
  # Drop rows where the response is missing, then compute per-group
  # summary statistics (mean, ci, ...) via summarySE() for each
  # day x ecosystem-size-symmetry x connection combination.
  summarised_data = data_for_plotting %>%
    filter(metaecosystem_type %in% metaecosystem_type_selected,
           !is.na(!!sym(response_variable_selected))) %>%
    summarySE(measurevar = response_variable_selected,
              groupvars = c("day", "ecosystem_size_symmetry", "connection"))
  # Base mapping: colour = size symmetry, line type = connection; points
  # are grouped per day so they can be dodged horizontally.
  p = ggplot(summarised_data,
             aes(x = day,
                 y = get(response_variable_selected),
                 group = interaction(day, ecosystem_size_symmetry, connection),
                 color = ecosystem_size_symmetry,
                 linetype = connection))
  # Means as dodged points, connected by one line per treatment, with
  # ci error bars. Layer order matches the original plot exactly.
  p = p +
    geom_point(stat = "summary",
               fun = "mean",
               position = position_dodge(dodging),
               size = treatment_points_size) +
    geom_line(stat = "summary",
              fun = "mean",
              aes(group = interaction(ecosystem_size_symmetry, connection)),
              position = position_dodge(dodging),
              linewidth = treatment_lines_linewidth) +
    geom_errorbar(aes(ymax = get(response_variable_selected) + ci,
                      ymin = get(response_variable_selected) - ci),
                  width = width_errorbar,
                  position = position_dodge(dodging))
  # Axis names (looked up in the axis_names table) and manual scales.
  p = p +
    labs(x = axis_names$axis_name[axis_names$variable == "day"],
         y = axis_names$axis_name[axis_names$variable == response_variable_selected],
         color = "") +
    scale_color_manual(values = treatment_colours_paper) +
    scale_linetype_manual(values = treatment_linetype_paper)
  # Resource-flow day markers, theme tweaks, a two-row legend, plot
  # margins, and finally the grey background rectangle (added last, as
  # in the original).
  p +
    geom_vline(xintercept = resource_flow_days,
               linetype = resource_flow_line_type,
               color = resource_flow_line_colour,
               linewidth = resource_flow_line_width) +
    theme_bw() +
    theme(panel.grid.major = element_blank(),
          panel.grid.minor = element_blank(),
          legend.position = legend_position,
          legend.key.width = unit(legend_width_cm, "cm")) +
    guides(color = guide_legend(title = NULL,
                                nrow = 2),
           linetype = guide_legend(title = NULL,
                                   nrow = 2)) +
    theme(plot.margin = unit(c(ggarrange_margin_left,
                               ggarrange_margin_right,
                               ggarrange_margin_bottom,
                               ggarrange_margin_left),
                             "cm")) +
    geom_rect(xmin = grey_background_xmin,
              xmax = grey_background_xmax,
              ymin = grey_background_ymin,
              ymax = grey_background_ymax,
              fill = grey_background_fill,
              alpha = grey_background_alpha,
              color = grey_background_color)
}
# Combine plots of alpha, beta, gamma biodiversity and biomass.
# Build each panel first; only the bottom panel keeps its x axis.
p_alpha = plot.single.plot("mean_shannon") +
  rremove("xlab") +
  theme(axis.text.x = element_blank(),
        axis.ticks.x = element_blank()) +
  font("legend.text", size = paper_labels_size) +
  font("ylab", size = paper_labels_size)
p_beta = plot.single.plot("bray_curtis") +
  rremove("xlab") +
  theme(axis.text.x = element_blank(),
        axis.ticks.x = element_blank()) +
  font("legend.text", size = paper_labels_size) +
  font("ylab", size = paper_labels_size)
p_gamma = plot.single.plot("metaecosystem_richness") +
  rremove("xlab") +
  theme(axis.text.x = element_blank(),
        axis.ticks.x = element_blank()) +
  font("legend.text", size = paper_labels_size) +
  font("ylab", size = paper_labels_size)
# Bottom panel: biomass, with x-axis labels at the sampling days.
p_biomass = plot.single.plot("total_metaecosystem_bioarea_mm2") +
  font("legend.text", size = paper_labels_size) +
  font("xlab", size = paper_labels_size) +
  font("ylab", size = paper_labels_size) +
  scale_x_continuous(breaks = unique(data_for_plotting$day))
# Stack the four panels vertically, give the axis-bearing bottom panel
# extra height, share a single legend, and print the result.
p_combined = ggarrange(p_alpha,
                       p_beta,
                       p_gamma,
                       p_biomass,
                       heights = c(0.8, 0.8, 0.8, 1),
                       nrow = 4,
                       common.legend = TRUE,
                       align = "v",
                       labels = c("(a)", "(b)", "(c)", "(d)"),
                       label.x = 0.1,
                       label.y = 0.8)
print(p_combined)
# --- PREPARE DATA FOR PLOTTING --- #
# All six small/large ecosystem types: each size either unconnected or
# connected to a small or a large partner.
ecosystem_type_selected = c("Small connected to large",
                            "Small connected to small",
                            "Small unconnected",
                            "Large connected to small",
                            "Large connected to large",
                            "Large unconnected")
# Keep the selected types at the selected disturbance level.
data_for_plotting = filter(ds_ecosystems,
                           ecosystem_type %in% ecosystem_type_selected,
                           disturbance == disturbance_selected)
# --- CONSTRUCT FUNCTION TO PLOT BIOMASS/SHANNON OF SMALL AND LARGE ECOSYSTEMS --- #
# Given a response-variable name (string), summarise it per day and
# ecosystem type and draw mean +/- ci over time, one line per type.
# Colour encodes ecosystem size and line type encodes the connection
# partner; legend labels are plotmath expressions (e.g. S[L] = small
# connected to large).
plot.single.plot = function(response_variable_selected){
  data_for_plotting %>%
    # Drop rows where the plotted variable is missing before summarising.
    filter(!is.na(!!sym(response_variable_selected))) %>%
    summarySE(measurevar = response_variable_selected,
              groupvars = c("day", "time_point", "ecosystem_type", "ecosystem_size", "connection")) %>%
    ggplot(aes(x = day,
               y = get(response_variable_selected),
               group = interaction(day, ecosystem_type),
               color = ecosystem_type,
               linetype = ecosystem_type)) +
    geom_point(stat = "summary",
               fun = "mean",
               position = position_dodge(dodging),
               size = treatment_points_size) +
    geom_line(stat = "summary",
              fun = "mean",
              aes(group = ecosystem_type),
              position = position_dodge(dodging),
              linewidth = treatment_lines_linewidth) +
    geom_errorbar(aes(ymax = get(response_variable_selected) + ci,
                      ymin = get(response_variable_selected) - ci),
                  width = width_errorbar,
                  position = position_dodge(dodging)) +
    labs(x = axis_names$axis_name[axis_names$variable == "day"],
         y = axis_names$axis_name[axis_names$variable == response_variable_selected],
         color = "") +
    # One colour per size (brown for the three small types, blue for the
    # three large ones). FIX: use the full argument name `labels` — the
    # original wrote `label =`, which only works via partial argument
    # matching through the scale's `...`.
    scale_color_manual(values = c("#993404",
                                  "#993404",
                                  "#993404",
                                  "#3182bd",
                                  "#3182bd",
                                  "#3182bd"),
                       labels = expression(S[L],
                                           S[S],
                                           S,
                                           L[S],
                                           L[L],
                                           L)) +
    scale_linetype_manual(values = c("solid",
                                     "dashed",
                                     "dotted",
                                     "solid",
                                     "dashed",
                                     "dotted"),
                          labels = expression(S[L],
                                              S[S],
                                              S,
                                              L[S],
                                              L[L],
                                              L)) +
    # Vertical markers at the resource-flow days and a horizontal zero line.
    geom_vline(xintercept = resource_flow_days,
               linetype = resource_flow_line_type,
               color = resource_flow_line_colour,
               linewidth = resource_flow_line_width) +
    geom_hline(yintercept = 0,
               color = zero_line_colour,
               linetype = zero_line_line_type,
               linewidth = zero_line_line_width) +
    theme_bw() +
    theme(panel.grid.major = element_blank(),
          panel.grid.minor = element_blank(),
          legend.position = legend_position,
          legend.key.width = unit(legend_width_cm, "cm")) +
    guides(color = guide_legend(title = NULL,
                                nrow = 3),
           linetype = guide_legend(title = NULL,
                                   nrow = 3)) +
    # Grey background rectangle added last, as in the sibling plots.
    geom_rect(xmin = grey_background_xmin,
              xmax = grey_background_xmax,
              ymin = grey_background_ymin,
              ymax = grey_background_ymax,
              fill = grey_background_fill,
              alpha = grey_background_alpha,
              color = grey_background_color)
}
# --- COMBINE PLOTS OF SHANNON AND BIOMASS --- #
# Top panel: Shannon diversity with its x axis hidden; bottom panel:
# biomass with the shared x axis labelled at the sampling days.
p_shannon = plot.single.plot("shannon") +
  rremove("xlab") +
  theme(axis.text.x = element_blank(),
        axis.ticks.x = element_blank()) +
  font("legend.text", size = paper_labels_size) +
  font("ylab", size = paper_labels_size)
p_bioarea = plot.single.plot("bioarea_mm2_per_ml") +
  font("legend.text", size = paper_labels_size) +
  font("xlab", size = paper_labels_size) +
  font("ylab", size = paper_labels_size) +
  scale_x_continuous(breaks = unique(data_for_plotting$day))
# NOTE(review): `heights` has three entries for a two-row layout — kept
# as in the original; confirm whether the extra value is intentional.
p_combined = ggarrange(p_shannon,
                       p_bioarea,
                       heights = c(0.8, 0.8, 1),
                       nrow = 2,
                       align = "v",
                       labels = c("(a)", "(b)"),
                       label.x = 0.1,
                       label.y = 0.8,
                       common.legend = TRUE)
print(p_combined)
# --- PREPARE DATA FOR PLOTTING --- #
# Define parameters
# The two medium-ecosystem types: connected pair vs unconnected control.
ecosystem_type_selected = c("Medium connected to medium",
"Medium unconnected")
# Filter data
# NOTE(review): the NA filter below uses `response_variable_selected`,
# which still holds "shannon" from the presentation section above, so
# rows with a missing Shannon value are dropped for BOTH panels plotted
# later (shannon AND biomass). The other sections instead filter NAs per
# response variable inside plot.single.plot — confirm this is intended.
data_for_plotting = ds_ecosystems %>%
filter(ecosystem_type %in% ecosystem_type_selected,
disturbance == disturbance_selected,
!is.na(!!sym(response_variable_selected)))
# --- CONSTRUCT FUNCTION TO PLOT BIOMASS/SHANNON OF MEDIUM ECOSYSTEMS --- #
# Given a response-variable name (string), summarise it per day and
# ecosystem type and draw mean +/- ci over time for the two medium
# treatments (M[M] = connected to another medium, dashed; M =
# unconnected, dotted; both drawn in the same orange).
plot.single.plot = function(response_variable_selected){
  data_for_plotting %>%
    # FIX (consistency): drop rows where the plotted variable is missing,
    # as the two sibling plot.single.plot definitions above do. The
    # original relied solely on the NA filter applied during data
    # preparation, which uses a different, fixed variable.
    filter(!is.na(!!sym(response_variable_selected))) %>%
    summarySE(measurevar = response_variable_selected,
              groupvars = c("day", "time_point", "ecosystem_type", "ecosystem_size", "connection")) %>%
    ggplot(aes(x = day,
               y = get(response_variable_selected),
               group = interaction(day, ecosystem_type),
               color = ecosystem_type,
               linetype = ecosystem_type)) +
    geom_point(stat = "summary",
               fun = "mean",
               position = position_dodge(dodging),
               size = treatment_points_size) +
    geom_line(stat = "summary",
              fun = "mean",
              aes(group = ecosystem_type),
              position = position_dodge(dodging),
              linewidth = treatment_lines_linewidth) +
    geom_errorbar(aes(ymax = get(response_variable_selected) + ci,
                      ymin = get(response_variable_selected) - ci),
                  width = width_errorbar,
                  position = position_dodge(dodging)) +
    labs(x = axis_names$axis_name[axis_names$variable == "day"],
         y = axis_names$axis_name[axis_names$variable == response_variable_selected],
         color = "") +
    # FIX: use the full argument name `labels` — the original wrote
    # `label =`, which only works via partial argument matching through
    # the scale's `...`.
    scale_color_manual(values = c("#d95f0e",
                                  "#d95f0e"),
                       labels = expression(M[M],
                                           M)) +
    scale_linetype_manual(values = c("dashed",
                                     "dotted"),
                          labels = expression(M[M],
                                              M)) +
    # Vertical markers at the resource-flow days and a horizontal zero line.
    geom_vline(xintercept = resource_flow_days,
               linetype = resource_flow_line_type,
               color = resource_flow_line_colour,
               linewidth = resource_flow_line_width) +
    geom_hline(yintercept = 0,
               color = zero_line_colour,
               linetype = zero_line_line_type,
               linewidth = zero_line_line_width) +
    theme_bw() +
    theme(panel.grid.major = element_blank(),
          panel.grid.minor = element_blank(),
          legend.position = legend_position,
          legend.key.width = unit(legend_width_cm, "cm")) +
    guides(color = guide_legend(title = NULL,
                                nrow = 3),
           linetype = guide_legend(title = NULL,
                                   nrow = 3)) +
    # Grey background rectangle added last, as in the sibling plots.
    geom_rect(xmin = grey_background_xmin,
              xmax = grey_background_xmax,
              ymin = grey_background_ymin,
              ymax = grey_background_ymax,
              fill = grey_background_fill,
              alpha = grey_background_alpha,
              color = grey_background_color)
}
# --- COMBINE PLOTS OF SHANNON AND BIOMASS --- #
# Same two-panel layout as for the small/large ecosystems: Shannon on
# top without an x axis, biomass below with the shared x axis.
p_shannon = plot.single.plot("shannon") +
  rremove("xlab") +
  theme(axis.text.x = element_blank(),
        axis.ticks.x = element_blank()) +
  font("legend.text", size = paper_labels_size) +
  font("ylab", size = paper_labels_size)
p_bioarea = plot.single.plot("bioarea_mm2_per_ml") +
  font("legend.text", size = paper_labels_size) +
  font("xlab", size = paper_labels_size) +
  font("ylab", size = paper_labels_size) +
  scale_x_continuous(breaks = unique(data_for_plotting$day))
# NOTE(review): `heights` has three entries for a two-row layout — kept
# as in the original; confirm whether the extra value is intentional.
p_combined = ggarrange(p_shannon,
                       p_bioarea,
                       heights = c(0.8, 0.8, 1),
                       nrow = 2,
                       align = "v",
                       labels = c("(a)", "(b)"),
                       label.x = 0.1,
                       label.y = 0.8,
                       common.legend = TRUE)
print(p_combined)
# --- DEFINE RESPONSE VARIABLE YOU WANT TO PLOT --- #
# Autotroph-to-heterotroph ratio column of ds_ecosystems.
response_variable = "auto_hetero_ratio"
# --- PREPARE DATA FOR PLOTTING --- #
# Short single-letter labels for the three unconnected control ecosystems.
ecosystem_type_selected = c("S", "M", "L")
# Recode the unconnected treatments to their short labels (every other
# ecosystem type becomes NA in case_when and is then removed by the
# %in% filter), keep the selected disturbance level, and drop rows with
# a missing response value.
data_for_plotting = ds_ecosystems %>%
  mutate(ecosystem_type = case_when(ecosystem_type == "Small unconnected" ~ "S",
                                    ecosystem_type == "Medium unconnected" ~ "M",
                                    ecosystem_type == "Large unconnected" ~ "L")) %>%
  filter(ecosystem_type %in% ecosystem_type_selected,
         disturbance == disturbance_selected,
         !is.na(!!sym(response_variable)))
# --- CONSTRUCT PLOT --- #
# Auto:hetero bioarea ratio of the three unconnected ecosystem sizes over
# time: mean +/- ci per day and size, in greyscale. The unnamed colour
# values are assigned in factor-level (alphabetical) order of the "S",
# "M", "L" labels — presumably L = black, M = dark grey, S = light grey;
# TODO confirm the mapping matches the intended sizes.
p = data_for_plotting %>%
# Manipulate
# Per-group summary statistics (mean, ci, ...) via summarySE().
summarySE(measurevar = response_variable,
groupvars = c("day", "ecosystem_type", "ecosystem_size", "connection")) %>%
# Create plot
ggplot(aes(x = day,
y = get(response_variable),
group = interaction(day, ecosystem_type),
color = ecosystem_type)) +
# Points
# Dodged mean points with ci error bars.
geom_point(stat = "summary",
fun = "mean",
position = position_dodge(dodging),
size = treatment_points_size) +
geom_errorbar(aes(ymax = get(response_variable) + ci,
ymin = get(response_variable) - ci),
width = width_errorbar,
position = position_dodge(dodging)) +
# Lines
# One line per ecosystem type connecting the daily means.
geom_line(stat = "summary",
fun = "mean",
aes(group = ecosystem_type),
position = position_dodge(dodging),
linewidth = treatment_lines_linewidth) +
# Axes and legend
labs(x = axis_names$axis_name[axis_names$variable == "day"],
y = axis_names$axis_name[axis_names$variable == response_variable],
color = "") +
scale_x_continuous(breaks = unique(data_for_plotting$day)) +
guides(color = guide_legend(title = NULL,
nrow = 1),
linetype = guide_legend(title = NULL,
nrow = 1)) +
scale_color_manual(values = c("#000000",
"#737373",
"#bdbdbd")) +
# Extra graphic elements
theme_bw() +
theme(panel.grid.major = element_blank(),
panel.grid.minor = element_blank(),
legend.position = legend_position,
legend.key.width = unit(legend_width_cm, "cm"),
axis.title.x = element_text(size = paper_labels_size),
axis.title.y = element_text(size = paper_labels_size),
legend.text = element_text(size = paper_labels_size)) +
# NOTE: unlike the earlier plots, the background rectangle and the
# zero/resource-flow lines are added AFTER the data layers here, so they
# are drawn on top; their visibility relies on grey_background_alpha.
geom_rect(xmin = grey_background_xmin,
xmax = grey_background_xmax,
ymin = grey_background_ymin,
ymax = grey_background_ymax,
fill = grey_background_fill,
alpha = grey_background_alpha,
color = grey_background_color) +
geom_hline(yintercept = 0,
color = zero_line_colour,
linetype = zero_line_line_type,
linewidth = zero_line_line_width) +
geom_vline(xintercept = resource_flow_days,
linetype = resource_flow_line_type,
color = resource_flow_line_colour,
linewidth = resource_flow_line_width)
# Print the assembled plot.
p
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
# --- DEFINE PLOTTING PARAMETERS --- #
# Layout and axis-range settings shared by the presentation figures below.
legend_row_n_input <- 2
size_y_axis <- 22
x_min <- -0.2
x_max <- 30
y_min <- -0.8
y_max <- 3
# --- PREPARE DATA FOR PLOTTING --- #
# Ecosystem types to display, in the order they are added to the plots.
ecosystem_type_all <- c(
  "Small connected to large",
  "Small unconnected",
  "Small connected to small",
  "Large connected to small",
  "Large unconnected",
  "Large connected to large"
)
# Response variable (alpha diversity) shown on the y axis.
response_variable_selected <- "shannon"
# Keep only the selected ecosystem types at the selected disturbance level.
data_for_plotting <- ds_ecosystems %>%
  filter(
    ecosystem_type %in% ecosystem_type_all,
    disturbance == disturbance_selected
  )
# --- CONSTRUCT PLOTS --- #
# Build an incremental series of presentation plots: plots[[1]] is an empty
# frame, and plots[[i + 1]] shows the first i ecosystem types.
plots = list()
plots[[1]] = plot.ecosystems.empty.presentations(y_min_alpha,
                                                 y_max_alpha,
                                                 "shannon")
# BUG FIX: the original loop header `1:length(ecosystem_type_all)+1` is, by
# operator precedence, (1:6)+1 = 2:7. That indexed ecosystem_type_all[1:7]
# (which contains an NA entry) and left plots[[2]] unset. seq_along() gives
# the intended 1..6 range, with plots filled at indices 2..7.
for (i in seq_along(ecosystem_type_all)) {
  plots[[i + 1]] = plot.ecosystems.points.presentations(
    data = data_for_plotting,
    ecosystem_type_selected = ecosystem_type_all[1:i],
    response_variable_selected
  ) +
    # NOTE(review): ggarrange_margin_left is used for both the first and the
    # last margin position — confirm this asymmetric margin is intended.
    theme(plot.margin = unit(c(ggarrange_margin_left,
                               ggarrange_margin_right,
                               ggarrange_margin_bottom,
                               ggarrange_margin_left),
                             "cm"),
          legend.position = "none") +
    scale_y_continuous(labels = scales::number_format(accuracy = 0.01)) +
    xlim(x_min, x_max) +
    ylim(y_min, y_max)
}
# --- COMBINE PLOTS --- #
# Wrap each plot in a one-row ggarrange panel with enlarged fonts.
# p_combined[[1]] stays the raw empty plot; indices 2..(n + 1) are styled.
# The original loop header `1:length(ecosystem_type_all)+1` evaluates, by
# operator precedence, to 2:(length(ecosystem_type_all) + 1); that same
# range is kept here, written out explicitly.
p_combined <- list(plots[[1]])
for (idx in 2:(length(ecosystem_type_all) + 1)) {
  styled_plot <- plots[[idx]] +
    theme(axis.text = element_text(size = presentation_axes_size)) +
    font("legend.text", size = size_legend) +
    font("xlab", size = size_y_axis) +
    font("ylab", size = size_y_axis)
  p_combined[[idx]] <- ggarrange(styled_plot,
                                 nrow = 1,
                                 align = "v",
                                 label.x = 0.1,
                                 label.y = 0.8)
}
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's colour values.
## Warning: No shared levels found between `names(values)` of the manual scale and the
## data's linetype values.
## Warning: Removed 1 row containing missing values or values outside the scale range
## (`geom_point()`).
## Warning: Removed 1 row containing missing values or values outside the scale range
## (`geom_line()`).
# --- SAVE COMBINED PLOTS --- #
# Write each combined panel to a PNG named by its index. The original loop
# header `1:length(ecosystem_type_all)+1` is, by operator precedence,
# 2:(length(ecosystem_type_all) + 1); that same range (2..7) is kept here,
# written out explicitly, so file names and indices are unchanged.
for (idx in 2:(length(ecosystem_type_all) + 1)) {
  out_file <- here("3_results",
                   "figures",
                   disturbance_selected,
                   "presentations",
                   paste0("ecosystems_connected_diversity_", idx, ".png"))
  png(file = out_file,
      width = presentation_figure_width,
      height = presentation_figure_height,
      units = presentation_figure_units,
      res = presentation_figure_res)
  print(p_combined[[idx]])
  dev.off()
}
To analyse the videos I took of the ecosystems, I used the R package BEMOVI. Because the analysis is computationally intensive, I ran it on a high-performance computer. Below is the code used for the video analysis on that machine.
# ===================================================================== #
# BEMOVI VIDEO-ANALYSIS SCRIPT (run on the high-performance computer).
# Sets up paths, memory limits, and video/filtering parameters for the
# particle-tracking pipeline that follows.
# NOTE(review): rm(list = ls()) and setwd() make this a machine-specific,
# one-off script; all paths below are hard-coded to that machine.
# ===================================================================== #
# Clear workspace
rm(list = ls())
# Set working directory
setwd("/media/mendel-himself/ID_061_Ema2/PatchSizePilot/training")
# Load required libraries
# library(devtools)
# install_github("femoerman/bemovi", ref="master")
library(bemovi)
library(parallel)
library(doParallel)
library(foreach)
# Define memory allocation parameters (in MB)
memory.alloc <- 240000 # Total memory allocated
memory.per.identifier <- 40000 # Memory per identifier
memory.per.linker <- 5000 # Memory per linker
memory.per.overlay <- 60000 # Memory per overlay
# Set paths for tools and particle linker
tools.path <- "/home/mendel-himself/bemovi_tools/" # Path to tools folder
to.particlelinker <- tools.path
# Set directories and file names
# All folder names below are relative to the working directory set above;
# the numeric prefixes reflect the order of the pipeline stages.
to.data <- paste(getwd(), "/", sep = "")
video.description.folder <- "0_video_description/"
video.description.file <- "video_description.txt"
raw.video.folder <- "1_raw/"
raw.avi.folder <- "1a_raw_avi/"
metadata.folder <- "1b_raw_meta/"
particle.data.folder <- "2_particle_data/"
trajectory.data.folder <- "3_trajectory_data/"
temp.overlay.folder <- "4a_temp_overlays/"
overlay.folder <- "4_overlays/"
merged.data.folder <- "5_merged_data/"
ijmacs.folder <- "ijmacs/"
######################################################################
# VIDEO PARAMETERS
# Define video parameters
fps <- 25 # Video frame rate (frames per second)
total_frames <- 125 # Total length of video (frames)
width <- 2048 # Video width (pixels)
height <- 2048 # Video height (pixels)
measured_volume <- 34.4 # Measured volume (microliters) for Leica M205 C with 1.6 fold magnification, sample height 0.5 mm and Hamamatsu Orca Flash 4
pixel_to_scale <- 4.05 # Size of a pixel (micrometers) for Leica M205 C with 1.6 fold magnification, sample height 0.5 mm and Hamamatsu Orca Flash 4
video.format <- "cxd" # Video file format (avi, cxd, mov, tiff)
difference.lag <- 10 # Difference lag
thresholds <- c(13, 255) # Threshold values of pixel intensity (considered a measure of pixel "whiteness") for determining if a pixel belongs to an individual rather than the background
######################################################################
# FILTERING PARAMETERS
# optimized for Perfex Pro 10 stereomicroscope with Perfex SC38800 (IDS UI-3880LE-M-GL) camera
# tested stereomicroscopes: Perfex Pro 10, Nikon SMZ1500, Leica M205 C
# tested cameras: Perfex SC38800, Canon 5D Mark III, Hamamatsu Orca Flash 4
# tested species: Tet, Col, Pau, Pca, Eug, Chi, Ble, Ceph, Lox, Spi
particle_min_size <- 10 # Minimum particle size (pixels)
particle_max_size <- 1000 # Maximum particle size (pixels)
trajectory_link_range <- 3 # Number of adjacent frames for linking particles
trajectory_displacement <- 16 # Maximum displacement of a particle between frames
# Filtering criteria
filter_min_net_disp <- 25 # Minimum net displacement (µm)
filter_min_duration <- 1 # Minimum duration (s)
filter_detection_freq <- 0.1 # Minimum detection frequency (1/s)
filter_median_step_length <- 3 # Minimum median step length (µm)
######################################################################
# VIDEO ANALYSIS
# Pipeline: convert videos -> locate particles -> link into trajectories ->
# merge with video descriptions -> filter -> summarize -> create overlays.
# Check if all tools are installed and set permissions
check_tools_folder(tools.path)
# Make the bundled bftools binaries executable (needed on a fresh setup).
system(paste0("chmod a+x ", tools.path, "bftools/bf.sh"))
system(paste0("chmod a+x ", tools.path, "bftools/bfconvert"))
system(paste0("chmod a+x ", tools.path, "bftools/showinf"))
# Convert video files to compressed avi format
convert_to_avi(to.data,
raw.video.folder,
raw.avi.folder,
metadata.folder,
tools.path,
fps,
video.format)
# Uncomment the following lines for testing
# check_video_file_names(to.data, raw.avi.folder, video.description.folder, video.description.file)
# check_threshold_values(to.data, raw.avi.folder, ijmacs.folder, 2, difference.lag, thresholds, tools.path, memory.alloc)
# Identify particles in the video
# Runs on all but one core; memory.alloc caps the total memory budget.
locate_and_measure_particles(to.data,
raw.avi.folder,
particle.data.folder,
difference.lag,
min_size = particle_min_size,
max_size = particle_max_size,
thresholds = thresholds,
tools.path,
memory = memory.alloc,
memory.per.identifier = memory.per.identifier,
max.cores = detectCores() - 1)
# Link particles across frames to form trajectories
link_particles(to.data,
particle.data.folder,
trajectory.data.folder,
linkrange = trajectory_link_range,
disp = trajectory_displacement,
start_vid = 1,
memory = memory.alloc,
memory_per_linkerProcess = memory.per.linker,
raw.avi.folder,
max.cores = detectCores() - 1,
max_time = 1)
# Merge video description file with particle data
merge_data(to.data,
particle.data.folder,
trajectory.data.folder,
video.description.folder,
video.description.file,
merged.data.folder)
# Load the merged data
# This load() brings `trajectory.data` into the workspace.
load(paste0(to.data, merged.data.folder, "Master.RData"))
# Filter trajectory data based on defined criteria
trajectory.data.filtered <- filter_data(trajectory.data,
filter_min_net_disp,
filter_min_duration,
filter_detection_freq,
filter_median_step_length)
# Summarize trajectory data to individual-based data
# NOTE(review): `F`/`T` are used for FALSE/TRUE in these calls; the full
# names would be safer since T/F can be reassigned.
morph_mvt <- summarize_trajectories(trajectory.data.filtered,
calculate.median = F,
write = T,
to.data,
merged.data.folder)
# Summarize sample level data
summarize_populations(trajectory.data.filtered,
morph_mvt,
write = T,
to.data,
merged.data.folder,
video.description.folder,
video.description.file,
total_frames)
# Create overlays for validation
create.subtitle.overlays(to.data,
traj.data = trajectory.data.filtered,
raw.video.folder,
raw.avi.folder,
temp.overlay.folder,
overlay.folder,
fps,
vid.length = total_frames / fps,
width,
height,
tools.path = tools.path,
overlay.type = "number",
video.format)
# Create overlays (old method)
create_overlays(traj.data = trajectory.data.filtered,
to.data = to.data,
merged.data.folder = merged.data.folder,
raw.video.folder = raw.avi.folder,
temp.overlay.folder = "4a_temp_overlays_old/",
overlay.folder = "4_overlays_old/",
width = width,
height = height,
difference.lag = difference.lag,
type = "traj",
predict_spec = F,
contrast.enhancement = 1,
IJ.path = "/home/mendel-himself/bemovi_tools",
memory = memory.alloc,
max.cores = detectCores() - 1,
memory.per.overlay = memory.per.overlay)
To avoid transferring all the raw data from the high-performance computer, I performed species identification on that system and subsequently imported the results into the RStudio project folder on my personal computer. Below is the code used for species identification on the high-performance computer.
# ===================================================================== #
# SPECIES-IDENTIFICATION SCRIPT (run on the high-performance computer).
# For each time point: train an SVM on monoculture morphology/movement
# summaries, then predict species identities in the mixed cultures.
# ===================================================================== #
# Clear the workspace
rm(list = ls())
# Uncomment and install required packages if not already installed
#install.packages("e1071",dependencies = T)
#install.packages("devtools",dependencies = T)
#install_github("pennekampster/bemovi", ref="master")
#library(devtools)
# Load required libraries
library(bemovi)
library(e1071)
library("here")
library("tidyverse")
# Define time points in the experiment
time_points_in_experiment = c("t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7")
# Loop through each time point in the experiment
for (time_point in time_points_in_experiment) {
# Define folder names and paths
video.description.folder = "0_video_description/"
video.description.file = "video_description.txt"
merged.data.folder = "5_merged_data/"
monocultures_folder_path = here("biomass_analysis", "training", "")
mixed_cultures_folder_path = here("biomass_analysis", time_point, "")
#Parameters used in the video analysis script
# nsv appears to be the number of seconds of video analysed, so that
# fps * nsv gives the frame count used below — TODO confirm.
fps = 25
nsv = 5
measured_volume = 34.4
pixel_to_scale = 4.05
filter_min_net_disp = 25
filter_min_duration = 1
filter_detection_freq = 0.1
filter_median_step_length = 3
# Load master dataset of mono-cultures
# This load() brings `trajectory.data` (monocultures) into the workspace.
load(paste0(monocultures_folder_path, merged.data.folder, "Master.RData"))
trajectory.data_monocultures = trajectory.data
rm(trajectory.data)
# Filter the master data of mono-cultures using the same parameters as in the video analysis script
trajectory.data_monocultures.filtered = filter_data(trajectory.data_monocultures,
filter_min_net_disp,
filter_min_duration,
filter_detection_freq,
filter_median_step_length)
# Summarize trajectory data to individual-based data
morph_mvt = summarize_trajectories(data = trajectory.data_monocultures.filtered,
calculate.median = FALSE,
write = TRUE,
to.data = monocultures_folder_path,
merged.data.folder = merged.data.folder) %>%
mutate(comment = NULL)
# Prepare training data by removing incomplete cases
training_data = morph_mvt[complete.cases(morph_mvt), ]
# Train SVM model on the training data
# Predictors are per-individual morphology (grey level, area, perimeter,
# axis lengths, aspect ratio) and movement (turning, displacement, speed)
# summaries; the response is the known monoculture species.
svm1 = svm(
factor(species) ~
mean_grey +
sd_grey +
mean_area +
sd_area +
mean_perimeter +
mean_turning +
sd_turning +
sd_perimeter +
mean_major +
sd_major +
mean_minor +
sd_minor +
mean_ar +
sd_ar +
duration +
max_net +
net_disp +
net_speed +
gross_disp +
max_step +
min_step +
sd_step +
sd_gross_speed +
max_gross_speed +
min_gross_speed ,
data = training_data,
probability = T,
na.action = na.pass)
# Generate and print confusion matrix
# class.error = per-class fraction of misclassified training individuals.
confusion.matrix = table(svm1$fitted, training_data$species)
confusion.matrix.nd = confusion.matrix
diag(confusion.matrix.nd) = 0
svm1$confusion = cbind(confusion.matrix,
class.error = rowSums(confusion.matrix.nd) / rowSums(confusion.matrix))
print(paste("Confusion matrix of time point", time_point))
print(svm1$confusion)
# Extract unique species names
species.names = unique(trajectory.data_monocultures$species)
# Load mixed cultures dataset
# This load() overwrites `trajectory.data` with the mixed-culture data.
load(paste0(mixed_cultures_folder_path, merged.data.folder, "Master.RData"))
trajectory.data_mixed = trajectory.data
rm(trajectory.data)
# Filter mixed cultures data using the same parameters
trajectory.data_mixed.filtered = filter_data(trajectory.data_mixed,
filter_min_net_disp,
filter_min_duration,
filter_detection_freq,
filter_median_step_length)
# Summarize trajectory data to individual-based data
# NOTE(review): the column filter below evaluates `colnames(morph_mvt)` on
# the PREVIOUS (monoculture) `morph_mvt`, since the name is only rebound
# after the whole right-hand side is evaluated — confirm the monoculture
# and mixed-culture summaries have matching column layouts.
morph_mvt = summarize_trajectories(data = trajectory.data_mixed.filtered,
calculate.median = FALSE,
write = TRUE,
to.data = mixed_cultures_folder_path,
merged.data.folder = merged.data.folder)[, which(colnames(morph_mvt) != "Col_manual")] %>%
mutate(comment = NULL)
# Prepare data for prediction by removing incomplete cases
data.to.predict = morph_mvt[complete.cases(morph_mvt),]
# Predict species using the trained SVM model
p.id = predict(object = svm1, data.to.predict, type = "response")
data.to.predict$predicted_species = as.character(p.id)
# Summarize population data
# NOTE(review): `traj.data` receives the MONOCULTURE filtered trajectories
# while `sum.data` is the mixed-culture summary — verify this is intended
# rather than trajectory.data_mixed.filtered.
# NOTE(review): the argument is spelled `total_frame` (singular); check the
# bemovi signature — this may rely on partial argument matching.
pop.data = summarize_populations(traj.data = trajectory.data_monocultures.filtered,
sum.data = morph_mvt,
write = TRUE,
to.data = mixed_cultures_folder_path,
merged.data.folder = merged.data.folder,
video.description.folder = video.description.folder,
video.description.file = video.description.file,
total_frame = fps * nsv)
# Function to calculate species density
# Converts per-individual species predictions into per-sample densities
# (individuals per microliter), one output column per species.
#
# Arguments:
#   sample_output   data frame of per-sample summaries; must contain a
#                   `file` column identifying the video each row came from.
#   indiv_predicted data frame of predicted individuals; must contain
#                   `file`, `predicted_species`, and `N_frames` columns.
#   species_names   character vector of all species; fixes the output
#                   columns so undetected species appear with density 0.
#   total_frames    number of frames analysed per video.
#   mv              measured volume (microliters); defaults to the global
#                   `measured_volume` for backward compatibility.
#
# Returns: `sample_output` with one appended numeric density column per
# species in `species_names`.
species.density = function(sample_output,
                           indiv_predicted,
                           species_names,
                           total_frames,
                           mv = measured_volume) {
  samples = unique(indiv_predicted$file)
  sp.dens = matrix(0,
                   nrow(sample_output),
                   length(species_names))
  colnames(sp.dens) = species_names
  # seq_along() (rather than 1:length()) avoids the 1:0 backwards-sequence
  # pitfall when there are no samples or no predicted species.
  for (i in seq_along(samples)) {
    indiv = subset(indiv_predicted, file == samples[i])
    spec = unique(indiv$predicted_species)
    for (j in seq_along(spec)) {
      all.indiv.sp = subset(indiv,
                            predicted_species == spec[j])
      # Density = summed frame counts, normalised by frames and volume.
      dens = sum(all.indiv.sp$N_frames) / total_frames / mv
      sp.dens[which(sample_output$file == as.character(samples[i])),
              which(species_names == spec[j])] = dens
    }
  }
  return(cbind(sample_output, sp.dens))
}
# Calculate species density for the current time point
output = species.density(pop.data,
data.to.predict,
species.names,
total_frames = fps * nsv,
mv = measured_volume)
# Save the species density results to a CSV file
# One file per time point, e.g. species_ID_t0.csv, under species_ID_results/.
file_name = paste0("species_ID_", time_point, ".csv")
write.csv(output, here("biomass_analysis", "species_ID_results", file_name))
# Free the per-time-point result before the next iteration.
rm(output)
}
## Time difference of 33.1 mins
If you want to change a certain part of the code across many files, use the following commands in a Unix shell:
#Rmd script
cd /Users/Ema/Documents/Github/PatchSize/3_r_files
sed -i '' 's/old_string/new_string/g' *.Rmd
#R script
cd /Users/ema/Documents/GitHub/PatchSize/3_r_files/functions
sed -i '' 's/old_string/new_string/g' *.R
If you want to share a dataset as a reproducible R object, use the following R code:
dput()
The only type of ecosystem where all cultures crashed was small connected to small at high disturbance.
R.version.string
## [1] "R version 4.3.2 (2023-10-31)"
The R packages we used with their version are as follows:
sessionInfo()
## R version 4.3.2 (2023-10-31)
## Platform: aarch64-apple-darwin20 (64-bit)
## Running under: macOS Sonoma 14.2.1
##
## Matrix products: default
## BLAS: /Library/Frameworks/R.framework/Versions/4.3-arm64/Resources/lib/libRblas.0.dylib
## LAPACK: /Library/Frameworks/R.framework/Versions/4.3-arm64/Resources/lib/libRlapack.dylib; LAPACK version 3.11.0
##
## locale:
## [1] en_US.UTF-8/en_US.UTF-8/en_US.UTF-8/C/en_US.UTF-8/en_US.UTF-8
##
## time zone: Europe/Zurich
## tzcode source: internal
##
## attached base packages:
## [1] stats graphics grDevices utils datasets methods base
##
## other attached packages:
## [1] conflicted_1.2.0 broom.mixed_0.2.9.5 emmeans_1.10.4
## [4] combinat_0.0-8 Rmisc_1.5.1 betapart_1.6
## [7] vegan_2.6-6.1 lattice_0.22-6 permute_0.9-7
## [10] glmmTMB_1.1.10 lmerTest_3.1-3 lme4_1.1-35.4
## [13] Matrix_1.6-5 DHARMa_0.4.7 GGally_2.2.1
## [16] gridExtra_2.3 plotly_4.10.4 ggpubr_0.6.0
## [19] lubridate_1.9.3 forcats_1.0.0 stringr_1.5.1
## [22] dplyr_1.1.4 purrr_1.0.2 readr_2.1.5
## [25] tidyr_1.3.1 tibble_3.2.1 ggplot2_3.5.1
## [28] tidyverse_2.0.0 plyr_1.8.9 renv_1.0.7.9000
## [31] testthat_3.2.1.1 here_1.0.1
##
## loaded via a namespace (and not attached):
## [1] RColorBrewer_1.1-3 rstudioapi_0.16.0 jsonlite_1.8.8
## [4] magrittr_2.0.3 estimability_1.5.1 farver_2.1.2
## [7] nloptr_2.1.1 rmarkdown_2.27 vctrs_0.6.5
## [10] memoise_2.0.1 minqa_1.2.7 rstatix_0.7.2
## [13] htmltools_0.5.8.1 itertools_0.1-3 broom_1.0.6
## [16] pracma_2.4.4 sass_0.4.9 parallelly_1.38.0
## [19] bslib_0.7.0 desc_1.4.3 htmlwidgets_1.6.4
## [22] cachem_1.1.0 TMB_1.9.15 mime_0.12
## [25] lifecycle_1.0.4 minpack.lm_1.2-4 iterators_1.0.14
## [28] pkgconfig_2.0.3 gap_1.6 optimx_2023-10.21
## [31] R6_2.5.1 fastmap_1.2.0 shiny_1.9.1
## [34] rbibutils_2.2.16 future_1.34.0 magic_1.6-1
## [37] digest_0.6.36 numDeriv_2016.8-1.1 colorspace_2.1-0
## [40] furrr_0.3.1 rprojroot_2.0.4 pkgload_1.3.4
## [43] qgam_1.3.4 labeling_0.4.3 fansi_1.0.6
## [46] timechange_0.3.0 httr_1.4.7 abind_1.4-5
## [49] mgcv_1.9-0 compiler_4.3.2 doParallel_1.0.17
## [52] withr_3.0.0 backports_1.5.0 carData_3.0-5
## [55] ggstats_0.6.0 highr_0.11 ggsignif_0.6.4
## [58] MASS_7.3-60 tools_4.3.2 ape_5.8
## [61] httpuv_1.6.15 glue_1.7.0 rcdd_1.6
## [64] promises_1.3.0 nlme_3.1-163 grid_4.3.2
## [67] cluster_2.1.4 generics_0.1.3 snow_0.4-4
## [70] gtable_0.3.5 tzdb_0.4.0 data.table_1.15.4
## [73] hms_1.1.3 car_3.1-2 utf8_1.2.4
## [76] foreach_1.5.2 pillar_1.9.0 later_1.3.2
## [79] splines_4.3.2 tidyselect_1.2.1 knitr_1.47
## [82] reformulas_0.3.0 xfun_0.45 brio_1.1.5
## [85] stringi_1.8.4 lazyeval_0.2.2 yaml_2.3.8
## [88] boot_1.3-30 evaluate_0.24.0 codetools_0.2-20
## [91] cli_3.6.3 xtable_1.8-4 geometry_0.4.7
## [94] Rdpack_2.6.1 munsell_0.5.1 jquerylib_0.1.4
## [97] Rcpp_1.0.12 doSNOW_1.0.20 globals_0.16.3
## [100] coda_0.19-4.1 parallel_4.3.2 picante_1.8.2
## [103] gap.datasets_0.0.6 listenv_0.9.1 viridisLite_0.4.2
## [106] mvtnorm_1.3-1 scales_1.3.0 rlang_1.1.4
## [109] cowplot_1.1.3 fastmatch_1.1-4 waldo_0.5.2